Skip to content

Commit

Permalink
[OpenAI] Add a sample for global batch (#31278)
Browse files Browse the repository at this point in the history
Add batch sample
  • Loading branch information
minhanh-phan authored Oct 16, 2024
1 parent e008167 commit 327e085
Show file tree
Hide file tree
Showing 5 changed files with 193 additions and 0 deletions.
63 changes: 63 additions & 0 deletions sdk/openai/openai/samples-dev/batch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
* @azsdk-weight 100
*/

import { AzureOpenAI, toFile } from "openai";
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";

// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
import "dotenv/config";

export async function main() {
  console.log("== Batch Chat Completions Sample ==");

  const scope = "https://cognitiveservices.azure.com/.default";
  const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
  const deployment = "gpt-4-turbo";
  const apiVersion = "2024-08-01-preview";
  const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });

  // One JSONL line per request; `custom_id` ties each result in the output
  // file back to the request that produced it.
  const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;

  // Upload a file with "batch" purpose
  const file = await client.files.create({
    file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
    purpose: "batch",
  });

  try {
    // Create the batch
    const batch = await client.batches.create({
      endpoint: "/v1/chat/completions",
      input_file_id: file.id,
      completion_window: "24h",
    });
    console.log(batch);

    // Poll until the batch reaches a terminal state. A batch is never
    // complete immediately after creation, so a single retrieve here would
    // always find output_file_id/error_file_id unset.
    const retrievedBatch = await waitForBatch(client, batch.id);
    console.log(retrievedBatch);

    // On success the results are in output_file_id; on failure the
    // per-request errors are in error_file_id.
    const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
    if (outputFileId) {
      const fileResponse = await client.files.content(outputFileId);
      const fileContent = await fileResponse.text();

      console.log(fileContent);
    }
  } finally {
    // Clean up the uploaded input file even if batch processing failed.
    await client.files.del(file.id);
  }
}

/**
 * Polls a batch until it reaches a terminal status or the attempt budget
 * is exhausted, and returns the last retrieved batch object.
 *
 * @param client - the Azure OpenAI client used to retrieve the batch.
 * @param batchId - the id of the batch to poll.
 * @param options - polling interval in milliseconds and maximum number of polls.
 */
async function waitForBatch(
  client: AzureOpenAI,
  batchId: string,
  { intervalMs = 30_000, maxAttempts = 20 } = {},
) {
  // Terminal statuses per the OpenAI Batch API lifecycle.
  const terminal = new Set(["completed", "failed", "expired", "cancelled"]);
  let batch = await client.batches.retrieve(batchId);
  for (let attempt = 1; attempt < maxAttempts && !terminal.has(batch.status); attempt++) {
    console.log(`Batch status: ${batch.status}; checking again in ${intervalMs / 1000}s...`);
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    batch = await client.batches.retrieve(batchId);
  }
  return batch;
}

main().catch((err) => {
  console.error("The sample encountered an error:", err);
});
2 changes: 2 additions & 0 deletions sdk/openai/openai/samples/v2-beta/javascript/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ These sample programs show how to use the JavaScript client libraries for Azure
| ----------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- |
| [audioTranscription.js][audiotranscription] | audio transcription. |
| [audioTranslation.js][audiotranslation] | audio translation. |
| [batch.js][batch] | create and retrieve batch content. |
| [chatCompletions.js][chatcompletions] | get chat completions. |
| [codeInterpreter.js][codeinterpreter] | interpreting code. |
| [completions.js][completions] | get completions. |
Expand Down Expand Up @@ -71,6 +72,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP

[audiotranscription]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/audioTranscription.js
[audiotranslation]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/audioTranslation.js
[batch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/batch.js
[chatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/chatCompletions.js
[codeinterpreter]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/codeInterpreter.js
[completions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/completions.js
Expand Down
64 changes: 64 additions & 0 deletions sdk/openai/openai/samples/v2-beta/javascript/batch.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
*/

const { AzureOpenAI, toFile } = require("openai");
const { DefaultAzureCredential, getBearerTokenProvider } = require("@azure/identity");

// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
require("dotenv/config");

async function main() {
  console.log("== Batch Chat Completions Sample ==");

  const scope = "https://cognitiveservices.azure.com/.default";
  const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
  const deployment = "gpt-4-turbo";
  const apiVersion = "2024-08-01-preview";
  const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });

  // One JSONL line per request; `custom_id` ties each result in the output
  // file back to the request that produced it.
  const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;

  // Upload a file with "batch" purpose
  const file = await client.files.create({
    file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
    purpose: "batch",
  });

  try {
    // Create the batch
    const batch = await client.batches.create({
      endpoint: "/v1/chat/completions",
      input_file_id: file.id,
      completion_window: "24h",
    });
    console.log(batch);

    // Poll until the batch reaches a terminal state. A batch is never
    // complete immediately after creation, so a single retrieve here would
    // always find output_file_id/error_file_id unset.
    const retrievedBatch = await waitForBatch(client, batch.id);
    console.log(retrievedBatch);

    // On success the results are in output_file_id; on failure the
    // per-request errors are in error_file_id.
    const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
    if (outputFileId) {
      const fileResponse = await client.files.content(outputFileId);
      const fileContent = await fileResponse.text();

      console.log(fileContent);
    }
  } finally {
    // Clean up the uploaded input file even if batch processing failed.
    await client.files.del(file.id);
  }
}

/**
 * Polls a batch until it reaches a terminal status or the attempt budget
 * is exhausted, and returns the last retrieved batch object.
 *
 * @param {AzureOpenAI} client - the Azure OpenAI client used to retrieve the batch.
 * @param {string} batchId - the id of the batch to poll.
 * @param {{ intervalMs?: number, maxAttempts?: number }} [options] - polling interval and maximum number of polls.
 */
async function waitForBatch(client, batchId, { intervalMs = 30_000, maxAttempts = 20 } = {}) {
  // Terminal statuses per the OpenAI Batch API lifecycle.
  const terminal = new Set(["completed", "failed", "expired", "cancelled"]);
  let batch = await client.batches.retrieve(batchId);
  for (let attempt = 1; attempt < maxAttempts && !terminal.has(batch.status); attempt++) {
    console.log(`Batch status: ${batch.status}; checking again in ${intervalMs / 1000}s...`);
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    batch = await client.batches.retrieve(batchId);
  }
  return batch;
}

main().catch((err) => {
  console.error("The sample encountered an error:", err);
});

module.exports = { main };
2 changes: 2 additions & 0 deletions sdk/openai/openai/samples/v2-beta/typescript/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ These sample programs show how to use the TypeScript client libraries for Azure
| ----------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- |
| [audioTranscription.ts][audiotranscription] | audio transcription. |
| [audioTranslation.ts][audiotranslation] | audio translation. |
| [batch.ts][batch] | create and retrieve batch content. |
| [chatCompletions.ts][chatcompletions] | get chat completions. |
| [codeInterpreter.ts][codeinterpreter] | interpreting code. |
| [completions.ts][completions] | get completions. |
Expand Down Expand Up @@ -83,6 +84,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP

[audiotranscription]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/audioTranscription.ts
[audiotranslation]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/audioTranslation.ts
[batch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/batch.ts
[chatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/chatCompletions.ts
[codeinterpreter]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/codeInterpreter.ts
[completions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/completions.ts
Expand Down
62 changes: 62 additions & 0 deletions sdk/openai/openai/samples/v2-beta/typescript/src/batch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
*/

import { AzureOpenAI, toFile } from "openai";
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";

// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
import "dotenv/config";

export async function main() {
  console.log("== Batch Chat Completions Sample ==");

  const scope = "https://cognitiveservices.azure.com/.default";
  const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
  const deployment = "gpt-4-turbo";
  const apiVersion = "2024-08-01-preview";
  const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });

  // One JSONL line per request; `custom_id` ties each result in the output
  // file back to the request that produced it.
  const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;

  // Upload a file with "batch" purpose
  const file = await client.files.create({
    file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
    purpose: "batch",
  });

  try {
    // Create the batch
    const batch = await client.batches.create({
      endpoint: "/v1/chat/completions",
      input_file_id: file.id,
      completion_window: "24h",
    });
    console.log(batch);

    // Poll until the batch reaches a terminal state. A batch is never
    // complete immediately after creation, so a single retrieve here would
    // always find output_file_id/error_file_id unset.
    const retrievedBatch = await waitForBatch(client, batch.id);
    console.log(retrievedBatch);

    // On success the results are in output_file_id; on failure the
    // per-request errors are in error_file_id.
    const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
    if (outputFileId) {
      const fileResponse = await client.files.content(outputFileId);
      const fileContent = await fileResponse.text();

      console.log(fileContent);
    }
  } finally {
    // Clean up the uploaded input file even if batch processing failed.
    await client.files.del(file.id);
  }
}

/**
 * Polls a batch until it reaches a terminal status or the attempt budget
 * is exhausted, and returns the last retrieved batch object.
 *
 * @param client - the Azure OpenAI client used to retrieve the batch.
 * @param batchId - the id of the batch to poll.
 * @param options - polling interval in milliseconds and maximum number of polls.
 */
async function waitForBatch(
  client: AzureOpenAI,
  batchId: string,
  { intervalMs = 30_000, maxAttempts = 20 } = {},
) {
  // Terminal statuses per the OpenAI Batch API lifecycle.
  const terminal = new Set(["completed", "failed", "expired", "cancelled"]);
  let batch = await client.batches.retrieve(batchId);
  for (let attempt = 1; attempt < maxAttempts && !terminal.has(batch.status); attempt++) {
    console.log(`Batch status: ${batch.status}; checking again in ${intervalMs / 1000}s...`);
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    batch = await client.batches.retrieve(batchId);
  }
  return batch;
}

main().catch((err) => {
  console.error("The sample encountered an error:", err);
});

0 comments on commit 327e085

Please sign in to comment.