From 488cfc6bb23ac991c06231480896c19b6bed1d49 Mon Sep 17 00:00:00 2001 From: "-)" Date: Mon, 30 Sep 2024 16:14:12 +0800 Subject: [PATCH 1/4] [Improve][Transform] add LLM Model provider MICROSOFT --- docs/en/transform-v2/llm.md | 5 +- docs/zh/transform-v2/llm.md | 2 +- .../seatunnel/e2e/transform/TestLLMIT.java | 15 +++ .../resources/llm_microsoft_transform.conf | 75 +++++++++++++ .../src/test/resources/mockserver-config.json | 32 ++++++ .../transform/nlpmodel/ModelProvider.java | 1 + .../transform/nlpmodel/llm/LLMTransform.java | 12 ++ .../nlpmodel/llm/LLMTransformFactory.java | 5 +- .../llm/remote/microsoft/MicrosoftModel.java | 103 ++++++++++++++++++ .../transform/llm/LLMRequestJsonTest.java | 34 ++++++ 10 files changed, 280 insertions(+), 4 deletions(-) create mode 100644 seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/llm_microsoft_transform.conf create mode 100644 seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/remote/microsoft/MicrosoftModel.java diff --git a/docs/en/transform-v2/llm.md b/docs/en/transform-v2/llm.md index 8ee5a36a9ab..9e958cf6e8c 100644 --- a/docs/en/transform-v2/llm.md +++ b/docs/en/transform-v2/llm.md @@ -11,7 +11,7 @@ more. ## Options | name | type | required | default value | -|------------------------| ------ | -------- |---------------| +|------------------------|--------|----------|---------------| | model_provider | enum | yes | | | output_data_type | enum | no | String | | output_column_name | string | no | llm_output | @@ -28,7 +28,7 @@ more. ### model_provider The model provider to use. 
The available options are: -OPENAI, DOUBAO, KIMIAI, CUSTOM +OPENAI, DOUBAO, KIMIAI, MICROSOFT, CUSTOM ### output_data_type @@ -254,6 +254,7 @@ sink { } } ``` + ### Customize the LLM model ```hocon diff --git a/docs/zh/transform-v2/llm.md b/docs/zh/transform-v2/llm.md index c6f7aeefead..eff97dcbfbf 100644 --- a/docs/zh/transform-v2/llm.md +++ b/docs/zh/transform-v2/llm.md @@ -26,7 +26,7 @@ ### model_provider 要使用的模型提供者。可用选项为: -OPENAI、DOUBAO、KIMIAI、CUSTOM +OPENAI、DOUBAO、KIMIAI、MICROSOFT, CUSTOM ### output_data_type diff --git a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java index d98a5e7e333..75b4bf79203 100644 --- a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java +++ b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java @@ -18,7 +18,9 @@ package org.apache.seatunnel.e2e.transform; import org.apache.seatunnel.e2e.common.TestResource; +import org.apache.seatunnel.e2e.common.container.EngineType; import org.apache.seatunnel.e2e.common.container.TestContainer; +import org.apache.seatunnel.e2e.common.junit.DisabledOnContainer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; @@ -82,12 +84,25 @@ public void tearDown() throws Exception { } @TestTemplate + @DisabledOnContainer( + value = {}, + type = {EngineType.SPARK, EngineType.FLINK}) public void testLLMWithOpenAI(TestContainer container) throws IOException, InterruptedException { Container.ExecResult execResult = container.executeJob("/llm_openai_transform.conf"); Assertions.assertEquals(0, execResult.getExitCode()); } + @TestTemplate + @DisabledOnContainer( + value = {}, + type 
= {EngineType.SPARK, EngineType.FLINK}) + public void testLLMWithMicrosoft(TestContainer container) + throws IOException, InterruptedException { + Container.ExecResult execResult = container.executeJob("/llm_microsoft_transform.conf"); + Assertions.assertEquals(0, execResult.getExitCode()); + } + @TestTemplate public void testLLMWithOpenAIBoolean(TestContainer container) throws IOException, InterruptedException { diff --git a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/llm_microsoft_transform.conf b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/llm_microsoft_transform.conf new file mode 100644 index 00000000000..37205a3acad --- /dev/null +++ b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/llm_microsoft_transform.conf @@ -0,0 +1,75 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +###### +###### This config file is a demonstration of streaming processing in seatunnel config +###### + +env { + job.mode = "BATCH" +} + +source { + FakeSource { + row.num = 5 + schema = { + fields { + id = "int" + name = "string" + } + } + rows = [ + {fields = [1, "Jia Fan"], kind = INSERT} + {fields = [2, "Hailin Wang"], kind = INSERT} + {fields = [3, "Tomas"], kind = INSERT} + {fields = [4, "Eric"], kind = INSERT} + {fields = [5, "Guangdong Liu"], kind = INSERT} + ] + result_table_name = "fake" + } +} + +transform { + LLM { + source_table_name = "fake" + model_provider = MICROSOFT + model = gpt-35-turbo + api_key = sk-xxx + prompt = "Determine whether someone is Chinese or American by their name" + api_path = "http://mockserver:1080/openai/deployments/${model}/chat/completions?api-version=2024-02-01" + result_table_name = "llm_output" + } +} + +sink { + Assert { + source_table_name = "llm_output" + rules = + { + field_rules = [ + { + field_name = llm_output + field_type = string + field_value = [ + { + rule_type = NOT_NULL + } + ] + } + ] + } + } +} \ No newline at end of file diff --git a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/mockserver-config.json b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/mockserver-config.json index 44dd94396ed..ffdb409c9c8 100644 --- a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/mockserver-config.json +++ b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/resources/mockserver-config.json @@ -104,5 +104,37 @@ "Content-Type": "application/json" } } + }, + { + "httpRequest": { + "method": "POST", + "path": "/openai/deployments/gpt-35-turbo/chat/.*" + }, + "httpResponse": { + "body": { + "id": "chatcmpl-6v7mkQj980V1yBec6ETrKPRqFjNw9", + "object": "chat.completion", + "created": 1679072642, + "model": "gpt-35-turbo", + "usage": { + 
"prompt_tokens": 58, + "completion_tokens": 68, + "total_tokens": 126 + }, + "choices": [ + { + "message": { + "role": "assistant", + "content": "[\"Chinese\"]" + }, + "finish_reason": "stop", + "index": 0 + } + ] + }, + "headers": { + "Content-Type": "application/json" + } + } } ] diff --git a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/ModelProvider.java b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/ModelProvider.java index ce22bc5a6d2..31721377062 100644 --- a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/ModelProvider.java +++ b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/ModelProvider.java @@ -26,6 +26,7 @@ public enum ModelProvider { "https://ark.cn-beijing.volces.com/api/v3/embeddings"), QIANFAN("", "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings"), KIMIAI("https://api.moonshot.cn/v1/chat/completions", ""), + MICROSOFT("", ""), CUSTOM("", ""), LOCAL("", ""); diff --git a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransform.java b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransform.java index 08ae42e4436..069945951bc 100644 --- a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransform.java +++ b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransform.java @@ -31,6 +31,7 @@ import org.apache.seatunnel.transform.nlpmodel.llm.remote.Model; import org.apache.seatunnel.transform.nlpmodel.llm.remote.custom.CustomModel; import org.apache.seatunnel.transform.nlpmodel.llm.remote.kimiai.KimiAIModel; +import org.apache.seatunnel.transform.nlpmodel.llm.remote.microsoft.MicrosoftModel; import org.apache.seatunnel.transform.nlpmodel.llm.remote.openai.OpenAIModel; import lombok.NonNull; @@ -94,6 +95,17 @@ public void open() { 
LLMTransformConfig.CustomRequestConfig .CUSTOM_RESPONSE_PARSE)); break; + case MICROSOFT: + model = + new MicrosoftModel( + inputCatalogTable.getSeaTunnelRowType(), + outputDataType.getSqlType(), + config.get(LLMTransformConfig.INFERENCE_COLUMNS), + config.get(LLMTransformConfig.PROMPT), + config.get(LLMTransformConfig.MODEL), + config.get(LLMTransformConfig.API_KEY), + provider.usedLLMPath(config.get(LLMTransformConfig.API_PATH))); + break; case OPENAI: case DOUBAO: model = diff --git a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java index eda57e1275f..c9d761a328e 100644 --- a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java +++ b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java @@ -50,7 +50,10 @@ public OptionRule optionRule() { LLMTransformConfig.PROCESS_BATCH_SIZE) .conditional( LLMTransformConfig.MODEL_PROVIDER, - Lists.newArrayList(ModelProvider.OPENAI, ModelProvider.DOUBAO), + Lists.newArrayList( + ModelProvider.OPENAI, + ModelProvider.DOUBAO, + ModelProvider.MICROSOFT), LLMTransformConfig.API_KEY) .conditional( LLMTransformConfig.MODEL_PROVIDER, diff --git a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/remote/microsoft/MicrosoftModel.java b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/remote/microsoft/MicrosoftModel.java new file mode 100644 index 00000000000..b6362c41a31 --- /dev/null +++ b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/remote/microsoft/MicrosoftModel.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.transform.nlpmodel.llm.remote.microsoft; + +import org.apache.seatunnel.shade.com.fasterxml.jackson.core.type.TypeReference; +import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.JsonNode; +import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.node.ArrayNode; +import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.node.ObjectNode; + +import org.apache.seatunnel.api.table.type.SeaTunnelRowType; +import org.apache.seatunnel.api.table.type.SqlType; +import org.apache.seatunnel.transform.nlpmodel.CustomConfigPlaceholder; +import org.apache.seatunnel.transform.nlpmodel.llm.remote.AbstractModel; + +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; + +import com.google.common.annotations.VisibleForTesting; + +import java.io.IOException; +import java.util.List; + +public class MicrosoftModel extends AbstractModel { + + private final CloseableHttpClient client; + private final String apiKey; + private final String model; + 
private final String apiPath; + + public MicrosoftModel( + SeaTunnelRowType rowType, + SqlType outputType, + List projectionColumns, + String prompt, + String model, + String apiKey, + String apiPath) { + super(rowType, outputType, projectionColumns, prompt); + this.model = model; + this.apiKey = apiKey; + this.apiPath = + CustomConfigPlaceholder.replacePlaceholders( + apiPath, CustomConfigPlaceholder.REPLACE_PLACEHOLDER_MODEL, model, null); + this.client = HttpClients.createDefault(); + } + + @Override + protected List chatWithModel(String prompt, String data) throws IOException { + HttpPost post = new HttpPost(apiPath); + post.setHeader("Authorization", "Bearer " + apiKey); + post.setHeader("Content-Type", "application/json"); + ObjectNode objectNode = createJsonNodeFromData(prompt, data); + post.setEntity(new StringEntity(OBJECT_MAPPER.writeValueAsString(objectNode), "UTF-8")); + post.setConfig( + RequestConfig.custom().setConnectTimeout(20000).setSocketTimeout(20000).build()); + CloseableHttpResponse response = client.execute(post); + String responseStr = EntityUtils.toString(response.getEntity()); + if (response.getStatusLine().getStatusCode() != 200) { + throw new IOException("Failed to chat with model, response: " + responseStr); + } + + JsonNode result = OBJECT_MAPPER.readTree(responseStr); + String resultData = result.get("choices").get(0).get("message").get("content").asText(); + return OBJECT_MAPPER.readValue( + convertData(resultData), new TypeReference>() {}); + } + + @VisibleForTesting + public ObjectNode createJsonNodeFromData(String prompt, String data) { + ObjectNode objectNode = OBJECT_MAPPER.createObjectNode(); + ArrayNode messages = objectNode.putArray("messages"); + messages.addObject().put("role", "system").put("content", prompt); + messages.addObject().put("role", "user").put("content", data); + return objectNode; + } + + @Override + public void close() throws IOException { + if (client != null) { + client.close(); + } + } +} diff --git 
a/seatunnel-transforms-v2/src/test/java/org/apache/seatunnel/transform/llm/LLMRequestJsonTest.java b/seatunnel-transforms-v2/src/test/java/org/apache/seatunnel/transform/llm/LLMRequestJsonTest.java index 91666c41391..870af980fe0 100644 --- a/seatunnel-transforms-v2/src/test/java/org/apache/seatunnel/transform/llm/LLMRequestJsonTest.java +++ b/seatunnel-transforms-v2/src/test/java/org/apache/seatunnel/transform/llm/LLMRequestJsonTest.java @@ -28,6 +28,7 @@ import org.apache.seatunnel.format.json.RowToJsonConverters; import org.apache.seatunnel.transform.nlpmodel.llm.remote.custom.CustomModel; import org.apache.seatunnel.transform.nlpmodel.llm.remote.kimiai.KimiAIModel; +import org.apache.seatunnel.transform.nlpmodel.llm.remote.microsoft.MicrosoftModel; import org.apache.seatunnel.transform.nlpmodel.llm.remote.openai.OpenAIModel; import org.junit.jupiter.api.Assertions; @@ -36,6 +37,7 @@ import com.google.common.collect.Lists; import java.io.IOException; +import java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -130,6 +132,38 @@ void testKimiAIRequestJson() throws IOException { model.close(); } + @Test + void testMicrosoftRequestJson() throws Exception { + SeaTunnelRowType rowType = + new SeaTunnelRowType( + new String[] {"id", "name"}, + new SeaTunnelDataType[] {BasicType.INT_TYPE, BasicType.STRING_TYPE}); + MicrosoftModel model = + new MicrosoftModel( + rowType, + SqlType.STRING, + null, + "Determine whether someone is Chinese or American by their name", + "gpt-35-turbo", + "sk-xxx", + "https://api.moonshot.cn/openai/deployments/${model}/chat/completions?api-version=2024-02-01"); + Field apiPathField = model.getClass().getDeclaredField("apiPath"); + apiPathField.setAccessible(true); + String apiPath = (String) apiPathField.get(model); + Assertions.assertEquals( + "https://api.moonshot.cn/openai/deployments/gpt-35-turbo/chat/completions?api-version=2024-02-01", + apiPath); + + ObjectNode node = + 
model.createJsonNodeFromData( + "Determine whether someone is Chinese or American by their name", + "{\"id\":1, \"name\":\"John\"}"); + Assertions.assertEquals( + "{\"messages\":[{\"role\":\"system\",\"content\":\"Determine whether someone is Chinese or American by their name\"},{\"role\":\"user\",\"content\":\"{\\\"id\\\":1, \\\"name\\\":\\\"John\\\"}\"}]}", + OBJECT_MAPPER.writeValueAsString(node)); + model.close(); + } + @Test void testCustomRequestJson() throws IOException { SeaTunnelRowType rowType = From e7109cbae65a40490c6c30f975f8b453d4dc41a6 Mon Sep 17 00:00:00 2001 From: "-)" Date: Wed, 2 Oct 2024 13:35:52 +0800 Subject: [PATCH 2/4] [Improve][Transform] Optimize code --- docs/en/transform-v2/llm.md | 2 ++ docs/zh/transform-v2/llm.md | 2 ++ .../org/apache/seatunnel/e2e/transform/TestLLMIT.java | 8 -------- .../transform/nlpmodel/llm/LLMTransformFactory.java | 3 +-- 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/docs/en/transform-v2/llm.md b/docs/en/transform-v2/llm.md index 9e958cf6e8c..3ae950cd8a7 100644 --- a/docs/en/transform-v2/llm.md +++ b/docs/en/transform-v2/llm.md @@ -30,6 +30,8 @@ more. The model provider to use. The available options are: OPENAI, DOUBAO, KIMIAI, MICROSOFT, CUSTOM +> tips: If you use Microsoft, please api_path cannot be empty + ### output_data_type The data type of the output data. 
The available options are: diff --git a/docs/zh/transform-v2/llm.md b/docs/zh/transform-v2/llm.md index eff97dcbfbf..1ae4a3b73ef 100644 --- a/docs/zh/transform-v2/llm.md +++ b/docs/zh/transform-v2/llm.md @@ -28,6 +28,8 @@ 要使用的模型提供者。可用选项为: OPENAI、DOUBAO、KIMIAI、MICROSOFT, CUSTOM +> tips: 如果使用 Microsoft, api_path 配置不能为空 + ### output_data_type 输出数据的数据类型。可用选项为: diff --git a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java index 75b4bf79203..f739e7af965 100644 --- a/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java +++ b/seatunnel-e2e/seatunnel-transforms-v2-e2e/seatunnel-transforms-v2-e2e-part-1/src/test/java/org/apache/seatunnel/e2e/transform/TestLLMIT.java @@ -18,9 +18,7 @@ package org.apache.seatunnel.e2e.transform; import org.apache.seatunnel.e2e.common.TestResource; -import org.apache.seatunnel.e2e.common.container.EngineType; import org.apache.seatunnel.e2e.common.container.TestContainer; -import org.apache.seatunnel.e2e.common.junit.DisabledOnContainer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; @@ -84,9 +82,6 @@ public void tearDown() throws Exception { } @TestTemplate - @DisabledOnContainer( - value = {}, - type = {EngineType.SPARK, EngineType.FLINK}) public void testLLMWithOpenAI(TestContainer container) throws IOException, InterruptedException { Container.ExecResult execResult = container.executeJob("/llm_openai_transform.conf"); @@ -94,9 +89,6 @@ public void testLLMWithOpenAI(TestContainer container) } @TestTemplate - @DisabledOnContainer( - value = {}, - type = {EngineType.SPARK, EngineType.FLINK}) public void testLLMWithMicrosoft(TestContainer container) throws IOException, InterruptedException { 
Container.ExecResult execResult = container.executeJob("/llm_microsoft_transform.conf"); diff --git a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java index c9d761a328e..834c0b4d174 100644 --- a/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java +++ b/seatunnel-transforms-v2/src/main/java/org/apache/seatunnel/transform/nlpmodel/llm/LLMTransformFactory.java @@ -26,7 +26,6 @@ import org.apache.seatunnel.api.table.factory.TableTransformFactory; import org.apache.seatunnel.api.table.factory.TableTransformFactoryContext; import org.apache.seatunnel.transform.nlpmodel.ModelProvider; -import org.apache.seatunnel.transform.nlpmodel.ModelTransformConfig; import com.google.auto.service.AutoService; @@ -60,7 +59,7 @@ public OptionRule optionRule() { ModelProvider.QIANFAN, LLMTransformConfig.API_KEY, LLMTransformConfig.SECRET_KEY, - ModelTransformConfig.OAUTH_PATH) + LLMTransformConfig.OAUTH_PATH) .conditional( LLMTransformConfig.MODEL_PROVIDER, ModelProvider.CUSTOM, From 67d9a5db1619719d635f23645a32979df35ee0a3 Mon Sep 17 00:00:00 2001 From: Jia Fan Date: Sat, 5 Oct 2024 14:55:44 +0800 Subject: [PATCH 3/4] Update docs/en/transform-v2/llm.md --- docs/en/transform-v2/llm.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/transform-v2/llm.md b/docs/en/transform-v2/llm.md index 3ae950cd8a7..81dc9b3c703 100644 --- a/docs/en/transform-v2/llm.md +++ b/docs/en/transform-v2/llm.md @@ -30,7 +30,7 @@ more. The model provider to use. 
The available options are:
OPENAI, DOUBAO, KIMIAI, MICROSOFT, CUSTOM
 
-> tips: If you use Microsoft, please api_path cannot be empty
+> tips: If you use Microsoft, please make sure api_path is not empty
 
 ### output_data_type
 

From 26442cc76b23bee05dcf5fd5db23566b474da745 Mon Sep 17 00:00:00 2001
From: Jia Fan
Date: Sat, 5 Oct 2024 14:55:53 +0800
Subject: [PATCH 4/4] Update docs/zh/transform-v2/llm.md

---
 docs/zh/transform-v2/llm.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/zh/transform-v2/llm.md b/docs/zh/transform-v2/llm.md
index 1ae4a3b73ef..5ab37f5870b 100644
--- a/docs/zh/transform-v2/llm.md
+++ b/docs/zh/transform-v2/llm.md
@@ -28,7 +28,7 @@
 要使用的模型提供者。可用选项为:
 OPENAI、DOUBAO、KIMIAI、MICROSOFT, CUSTOM
 
-> tips: 如果使用 Microsoft, api_path 配置不能为空
+> tips: 如果使用 Microsoft, 请确保 api_path 配置不为空
 
 ### output_data_type