Skip to content

Commit 5202bbd

Browse files
authored
chore: fix some integ tests (BoundaryML#2607)
The OpenAI batch test is not a meaningful test, so remove it; remove LLM dependencies for the other tests.
1 parent af24513 commit 5202bbd

File tree

13 files changed

+963
-157
lines changed

13 files changed

+963
-157
lines changed

integ-tests/baml_src/clients.baml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -227,10 +227,11 @@ client<llm> AwsBedrock {
227227
}
228228
// max_tokens 100000
229229
// max_completion_tokens 100000
230-
model "anthropic.claude-3-5-sonnet-20240620-v1:0"
230+
model "us.anthropic.claude-3-5-haiku-20241022-v1:0"
231+
// model "anthropic.claude-3-5-sonnet-20240620-v1:0"
231232
// model_id "anthropic.claude-3-haiku-20240307-v1:0"
232233
//model "arn:aws:bedrock:us-east-1:404337120808:inference-profile/us.anthropic.claude-3-7-sonnet-20250219-v1:0"
233-
endpoint_url "https://bedrock-runtime.us-east-1.amazonaws.com/"
234+
// endpoint_url "https://bedrock-runtime.us-east-1.amazonaws.com/"
234235
// region "us-east-1"
235236
// access_key_id env.AWS_ACCESS_KEY_ID
236237
// secret_access_key env.AWS_SECRET_ACCESS_KEY

integ-tests/go/baml_client/baml_source_map.go

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/python-v1/baml_client/inlinedbaml.py

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/python/baml_client/inlinedbaml.py

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/react/baml_client/inlinedbaml.ts

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/typescript-esm/baml_client/inlinedbaml.ts

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/typescript/baml_client/inlinedbaml.ts

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

integ-tests/typescript/package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
"@anthropic-ai/sdk": "0.39.0",
3737
"@aws-crypto/sha256-js": "^5.2.0",
3838
"@aws-sdk/client-sts": "3.721.0",
39+
"@aws-sdk/credential-provider-node": "^3.908.0",
3940
"@aws-sdk/credential-providers": "3.830.0",
4041
"@boundaryml/baml": "workspace:*",
4142
"@google/generative-ai": "0.24.0",

integ-tests/typescript/tests/modular.test.ts

Lines changed: 2 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ import {
1010
GoogleGenerativeAI,
1111
} from "@google/generative-ai";
1212
import { SignatureV4 } from "@smithy/signature-v4";
13-
import { fromEnv } from "@aws-sdk/credential-providers";
13+
import { defaultProvider } from "@aws-sdk/credential-provider-node";
1414
import { HttpRequest } from "@smithy/protocol-http";
1515
import { Sha256 } from "@aws-crypto/sha256-js";
1616
import { HTTPRequest as BamlHttpRequest } from "@boundaryml/baml";
@@ -184,93 +184,6 @@ describe("Modular API Tests", () => {
184184
expect(parsed).toEqual(JOHN_DOE_PARSED_RESUME);
185185
});
186186

187-
it("openai batch api", async () => {
188-
const client = new OpenAI();
189-
190-
// Helper function to convert BAML HTTP request to OpenAI batch JSONL format
191-
const toOpenaiJsonl = (req: BamlHttpRequest): string => {
192-
const line = JSON.stringify({
193-
custom_id: req.id,
194-
method: "POST",
195-
url: "/v1/chat/completions",
196-
body: req.body.json(),
197-
});
198-
return `${line}\n`;
199-
};
200-
201-
// Create requests for both resumes
202-
const [johnReq, janeReq] = await Promise.all([
203-
b.request.ExtractResume2(JOHN_DOE_TEXT_RESUME),
204-
b.request.ExtractResume2(JANE_SMITH_TEXT_RESUME),
205-
]);
206-
207-
const jsonl = toOpenaiJsonl(johnReq) + toOpenaiJsonl(janeReq);
208-
209-
// Create batch input file
210-
const batchInputFile = await client.files.create({
211-
file: new File([jsonl], "batch.jsonl"),
212-
purpose: "batch",
213-
});
214-
215-
// Create batch
216-
let batch = await client.batches.create({
217-
input_file_id: batchInputFile.id,
218-
endpoint: "/v1/chat/completions",
219-
completion_window: "24h",
220-
metadata: {
221-
description: "BAML Modular API TypeScript Batch Integ Test",
222-
},
223-
});
224-
225-
let backoff = 1000; // milliseconds
226-
let attempts = 0;
227-
const maxAttempts = 30;
228-
229-
while (true) {
230-
batch = await client.batches.retrieve(batch.id);
231-
attempts += 1;
232-
233-
if (batch.status === "completed") {
234-
break;
235-
}
236-
237-
if (attempts >= maxAttempts) {
238-
try {
239-
await client.batches.cancel(batch.id);
240-
} finally {
241-
throw "Batch failed to complete in time";
242-
}
243-
}
244-
245-
await new Promise((resolve) => setTimeout(resolve, backoff));
246-
// backoff *= 2 // Exponential backoff
247-
}
248-
249-
// Get output file
250-
const output = await client.files.content(batch.output_file_id!);
251-
252-
// Process results
253-
const expected: Record<string, Resume> = {
254-
[johnReq.id]: JOHN_DOE_PARSED_RESUME,
255-
[janeReq.id]: JANE_SMITH_PARSED_RESUME,
256-
};
257-
258-
const received: Record<string, Resume> = {};
259-
const outputJsonl = await output.text();
260-
261-
for (const line of outputJsonl
262-
.split("\n")
263-
.filter((line) => line.trim().length > 0)) {
264-
const result = JSON.parse(line.trim());
265-
const llmResponse = result.response.body.choices[0].message.content;
266-
267-
const parsed = b.parse.ExtractResume2(llmResponse);
268-
received[result.custom_id] = parsed;
269-
}
270-
271-
expect(received).toEqual(expected);
272-
});
273-
274187
it("modular openai responses", async () => {
275188
// Test openai-responses provider using the modular API
276189
const client = new OpenAI();
@@ -318,7 +231,7 @@ describe("Modular API Tests", () => {
318231
const signer = new SignatureV4({
319232
service: "bedrock",
320233
region,
321-
credentials: fromEnv(),
234+
credentials: defaultProvider(),
322235
sha256: Sha256,
323236
});
324237

integ-tests/typescript/tests/providers/openai-responses.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ describe("OpenAI Provider", () => {
3333
role: "user",
3434
},
3535
],
36-
model: "gpt-4.1",
36+
model: "gpt-5-mini",
3737
});
3838
});
3939

@@ -51,7 +51,7 @@ describe("OpenAI Provider", () => {
5151
role: "user",
5252
},
5353
],
54-
model: "gpt-4.1",
54+
model: "gpt-5-mini",
5555
});
5656
});
5757

0 commit comments

Comments (0)