diff --git a/src/routes/embeddings.rs b/src/routes/embeddings.rs
index 054fdd8..a3b48c6 100644
--- a/src/routes/embeddings.rs
+++ b/src/routes/embeddings.rs
@@ -17,15 +17,10 @@ use crate::state::{BackendTypes, State};
 pub async fn embed(
     AxumState(state): AxumState<Arc<State>>,
     mut request: Json<CreateEmbeddingRequest>,
-) -> Response {
+) -> Result<Response, AiRouterError<String>> {
     if let Some(models) = state.config.models.get(&AiRouterModelType::Embeddings) {
         if let Some(model) = models.get(&request.model) {
-            let request_data = match AiRouterRequestData::build(model, &request.model, &state) {
-                Ok(d) => d,
-                Err(e) => {
-                    return e.into_response();
-                }
-            };
+            let request_data = AiRouterRequestData::build(model, &request.model, &state)?;
             if let Some(backend_model) = model.backend_model.clone() {
                 request.model = backend_model;
             }
@@ -33,26 +28,31 @@ pub async fn embed(
             let model_backend = model.backend.as_ref().map_or("default", |m| m);
 
             let Some(backend) = state.backends.get(model_backend) else {
-                return AiRouterError::InternalServerError::<String>(format!(
+                return Err(AiRouterError::InternalServerError::<String>(format!(
                     "backend {model_backend} not found"
-                ))
-                .into_response();
+                )));
             };
 
             match &backend.client {
                 BackendTypes::OpenAI(c) => {
-                    return openai_routes::embeddings::embed(c.clone(), request, &request_data)
-                        .await
-                        .into_response();
+                    return Ok(
+                        openai_routes::embeddings::embed(c.clone(), request, &request_data)
+                            .await
+                            .into_response(),
+                    );
                 }
                 BackendTypes::Triton(c) => {
-                    return triton_routes::embeddings::embed(c.clone(), request, &request_data)
-                        .await
-                        .into_response();
+                    return Ok(
+                        triton_routes::embeddings::embed(c.clone(), request, &request_data)
+                            .await
+                            .into_response(),
+                    );
                 }
             }
         }
     }
 
-    return AiRouterError::ModelNotFound::<String>(request.model.clone()).into_response();
+    Err(AiRouterError::ModelNotFound::<String>(
+        request.model.clone(),
+    ))
 }
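
This change converts the `embed` handler from returning a bare `Response` to returning `Result<Response, AiRouterError<String>>`. The `AiRouterRequestData::build` error now propagates with `?` instead of an explicit `match`/`into_response()` block, and the two failure paths become plain `Err(...)` returns. This works because axum accepts `Result<T, E>` as a handler return type whenever both `T` and `E` implement `IntoResponse`, and the old code already called `.into_response()` on `AiRouterError` directly, so the error type must implement that trait. Below is a minimal sketch of the shape such an error type can take; the variant set and status-code mapping are illustrative assumptions, not the crate's actual definition:

```rust
use axum::{
    http::StatusCode,
    response::{IntoResponse, Response},
};

// Hypothetical, simplified stand-in for the crate's AiRouterError.
#[derive(Debug)]
pub enum AiRouterError<T: ToString> {
    InternalServerError(T),
    ModelNotFound(T),
}

// Because the error type implements IntoResponse, an axum handler may
// return Result<Response, AiRouterError<String>> and bail out early with
// `?` or `Err(...)`; axum converts the Err variant into an HTTP response.
impl<T: ToString> IntoResponse for AiRouterError<T> {
    fn into_response(self) -> Response {
        match self {
            // 500 with the formatted message, e.g. "backend foo not found".
            Self::InternalServerError(msg) => {
                (StatusCode::INTERNAL_SERVER_ERROR, msg.to_string()).into_response()
            }
            // 404 naming the model the client asked for.
            Self::ModelNotFound(model) => (
                StatusCode::NOT_FOUND,
                format!("model {} not found", model.to_string()),
            )
                .into_response(),
        }
    }
}
```

With this in place, `?` works in the handler as long as `AiRouterRequestData::build` returns `Result<_, AiRouterError<String>>` (or an error convertible into it via `From`), and only the successful backend responses need the explicit `Ok(... .into_response())` wrapping seen in the two match arms.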