5 changes: 5 additions & 0 deletions .changeset/tricky-glasses-open.md
@@ -0,0 +1,5 @@
---
"@google/generative-ai": minor
---

Introduce a new error type, `GoogleGenerativeAIAbortError`, to represent an `AbortError` thrown from the SDK
4 changes: 4 additions & 0 deletions common/api-review/generative-ai.api.md
@@ -539,6 +539,10 @@ export class GoogleGenerativeAI {
getGenerativeModelFromCachedContent(cachedContent: CachedContent, modelParams?: Partial<ModelParams>, requestOptions?: RequestOptions): GenerativeModel;
}

// @public
export class GoogleGenerativeAIAbortError extends GoogleGenerativeAIError {
}

// @public
export class GoogleGenerativeAIError extends Error {
constructor(message: string);
15 changes: 15 additions & 0 deletions docs/reference/main/generative-ai.googlegenerativeaiaborterror.md
@@ -0,0 +1,15 @@
<!-- Do not edit this file. It is automatically generated by API Documenter. -->

[Home](./index.md) &gt; [@google/generative-ai](./generative-ai.md) &gt; [GoogleGenerativeAIAbortError](./generative-ai.googlegenerativeaiaborterror.md)

## GoogleGenerativeAIAbortError class

Error thrown when a request is aborted, either due to a timeout or intentional cancellation by the user.

**Signature:**

```typescript
export declare class GoogleGenerativeAIAbortError extends GoogleGenerativeAIError
```
**Extends:** [GoogleGenerativeAIError](./generative-ai.googlegenerativeaierror.md)

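For illustration (not part of the generated reference docs), a minimal sketch of how a caller might handle this error; the model name, API-key handling, and the 5-second timeout are assumptions made for the example:

```typescript
import {
  GoogleGenerativeAI,
  GoogleGenerativeAIAbortError,
} from "@google/generative-ai";

async function main() {
  // Placeholder API key and model name; the timeout is what triggers the abort.
  const genAI = new GoogleGenerativeAI(process.env.API_KEY ?? "");
  const model = genAI.getGenerativeModel(
    { model: "gemini-1.5-flash" },
    { timeout: 5000 },
  );

  try {
    const result = await model.generateContent("Tell me a story.");
    console.log(result.response.text());
  } catch (e) {
    if (e instanceof GoogleGenerativeAIAbortError) {
      // Timed out or cancelled by the caller, as opposed to a server failure.
      console.warn("Request aborted:", e.message);
    } else {
      throw e;
    }
  }
}

main();
```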
1 change: 1 addition & 0 deletions docs/reference/main/generative-ai.md
@@ -11,6 +11,7 @@
| [ChatSession](./generative-ai.chatsession.md) | ChatSession class that enables sending chat messages and stores history of sent and received messages so far. |
| [GenerativeModel](./generative-ai.generativemodel.md) | Class for generative model APIs. |
| [GoogleGenerativeAI](./generative-ai.googlegenerativeai.md) | Top-level class for this SDK |
| [GoogleGenerativeAIAbortError](./generative-ai.googlegenerativeaiaborterror.md) | Error thrown when a request is aborted, either due to a timeout or intentional cancellation by the user. |
| [GoogleGenerativeAIError](./generative-ai.googlegenerativeaierror.md) | Basic error type for this SDK. |
| [GoogleGenerativeAIFetchError](./generative-ai.googlegenerativeaifetcherror.md) | Error class covering HTTP errors when calling the server. Includes HTTP status, statusText, and optional details, if provided in the server response. |
| [GoogleGenerativeAIRequestInputError](./generative-ai.googlegenerativeairequestinputerror.md) | Errors in the contents of a request originating from user input. |
7 changes: 7 additions & 0 deletions src/errors.ts
@@ -64,3 +64,10 @@ export class GoogleGenerativeAIFetchError extends GoogleGenerativeAIError {
* @public
*/
export class GoogleGenerativeAIRequestInputError extends GoogleGenerativeAIError {}

/**
* Error thrown when a request is aborted, either due to a timeout or
* intentional cancellation by the user.
* @public
*/
export class GoogleGenerativeAIAbortError extends GoogleGenerativeAIError {}
26 changes: 25 additions & 1 deletion src/requests/request.test.ts
@@ -28,6 +28,7 @@ import {
makeModelRequest,
} from "./request";
import {
GoogleGenerativeAIAbortError,
GoogleGenerativeAIFetchError,
GoogleGenerativeAIRequestInputError,
} from "../errors";
@@ -228,7 +229,30 @@
});
expect(response.ok).to.be.true;
});
it("error with timeout", async () => {
it("error with local timeout", async () => {
const abortError = new DOMException("Request timeout.", "AbortError");
const fetchStub = stub().rejects(abortError);

try {
await makeModelRequest(
"model-name",
Task.GENERATE_CONTENT,
"key",
true,
"",
{
timeout: 100,
},
fetchStub as typeof fetch,
);
} catch (e) {
expect((e as GoogleGenerativeAIAbortError).message).to.include(
"Request aborted",
);
}
expect(fetchStub).to.be.calledOnce;
});
it("error with server timeout", async () => {
const fetchStub = stub().resolves({
ok: false,
status: 500,
8 changes: 7 additions & 1 deletion src/requests/request.ts
@@ -17,6 +17,7 @@

import { RequestOptions, SingleRequestOptions } from "../../types";
import {
GoogleGenerativeAIAbortError,
GoogleGenerativeAIError,
GoogleGenerativeAIFetchError,
GoogleGenerativeAIRequestInputError,
@@ -172,7 +173,12 @@ export async function makeRequest(

function handleResponseError(e: Error, url: string): void {
let err = e;
if (
if (err.name === "AbortError") {
err = new GoogleGenerativeAIAbortError(
`Request aborted when fetching ${url.toString()}: ${e.message}`,
);
err.stack = e.stack;
} else if (
!(
e instanceof GoogleGenerativeAIFetchError ||
e instanceof GoogleGenerativeAIRequestInputError
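Side note on where the `AbortError` name checked above comes from: `fetch` rejects with a `DOMException` named `"AbortError"` when its signal fires, whether that signal belongs to a caller-supplied `AbortController` or to the SDK's internal timeout. A standalone sketch with a placeholder URL (not SDK code):

```typescript
async function demo() {
  const controller = new AbortController();
  // Comparable to the SDK's timeout option: abort the request after 100 ms.
  const timer = setTimeout(() => controller.abort(), 100);

  try {
    await fetch("https://example.com/slow-endpoint", {
      signal: controller.signal,
    });
  } catch (e) {
    // This is the condition handleResponseError checks before rewrapping the
    // failure as GoogleGenerativeAIAbortError.
    console.log((e as DOMException).name); // "AbortError"
  } finally {
    clearTimeout(timer);
  }
}

demo();
```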
47 changes: 47 additions & 0 deletions src/requests/stream-reader.test.ts
@@ -25,6 +25,7 @@ import { restore } from "sinon";
import * as sinonChai from "sinon-chai";
import {
getChunkedStream,
getErrorStream,
getMockResponseStreaming,
} from "../../test-utils/mock-response";
import {
@@ -34,6 +35,10 @@ import {
HarmCategory,
HarmProbability,
} from "../../types";
import {
GoogleGenerativeAIAbortError,
GoogleGenerativeAIError,
} from "../errors";

use(sinonChai);

@@ -61,6 +66,48 @@ describe("getResponseStream", () => {
}
expect(responses).to.deep.equal(src);
});
it("throw AbortError", async () => {
const inputStream = getErrorStream(
new DOMException("Simulated AbortError", "AbortError"),
).pipeThrough(new TextDecoderStream("utf8", { fatal: true }));
const responseStream = getResponseStream<{ text: string }>(inputStream);
const reader = responseStream.getReader();
const responses: Array<{ text: string }> = [];
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
responses.push(value);
}
} catch (e) {
expect((e as GoogleGenerativeAIAbortError).message).to.include(
"Request aborted",
);
}
});
it("throw non AbortError", async () => {
const inputStream = getErrorStream(
new DOMException("Simulated Error", "RandomError"),
).pipeThrough(new TextDecoderStream("utf8", { fatal: true }));
const responseStream = getResponseStream<{ text: string }>(inputStream);
const reader = responseStream.getReader();
const responses: Array<{ text: string }> = [];
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
responses.push(value);
}
} catch (e) {
expect((e as GoogleGenerativeAIError).message).to.include(
"Error reading from the stream",
);
}
});
});

describe("processStream", () => {
75 changes: 47 additions & 28 deletions src/requests/stream-reader.ts
@@ -22,7 +22,10 @@ import {
GenerateContentStreamResult,
Part,
} from "../../types";
import { GoogleGenerativeAIError } from "../errors";
import {
GoogleGenerativeAIAbortError,
GoogleGenerativeAIError,
} from "../errors";
import { addHelpers } from "./response-helpers";

const responseLineRE = /^data\: (.*)(?:\n\n|\r\r|\r\n\r\n)/;
@@ -89,38 +92,54 @@ export function getResponseStream<T>(
let currentText = "";
return pump();
function pump(): Promise<(() => Promise<void>) | undefined> {
return reader.read().then(({ value, done }) => {
if (done) {
if (currentText.trim()) {
controller.error(
new GoogleGenerativeAIError("Failed to parse stream"),
);
return reader
.read()
.then(({ value, done }) => {
if (done) {
if (currentText.trim()) {
controller.error(
new GoogleGenerativeAIError("Failed to parse stream"),
);
return;
}
controller.close();
return;
}
controller.close();
return;
}

currentText += value;
let match = currentText.match(responseLineRE);
let parsedResponse: T;
while (match) {
try {
parsedResponse = JSON.parse(match[1]);
} catch (e) {
controller.error(
new GoogleGenerativeAIError(
`Error parsing JSON response: "${match[1]}"`,
),
currentText += value;
let match = currentText.match(responseLineRE);
let parsedResponse: T;
while (match) {
try {
parsedResponse = JSON.parse(match[1]);
} catch (e) {
controller.error(
new GoogleGenerativeAIError(
`Error parsing JSON response: "${match[1]}"`,
),
);
return;
}
controller.enqueue(parsedResponse);
currentText = currentText.substring(match[0].length);
match = currentText.match(responseLineRE);
}
return pump();
})
.catch((e: Error) => {
let err = e;
err.stack = e.stack;
if (err.name === "AbortError") {
err = new GoogleGenerativeAIAbortError(
"Request aborted when reading from the stream",
);
} else {
err = new GoogleGenerativeAIError(
"Error reading from the stream",
);
> **Collaborator (review comment):** Could we just do `throw new GoogleGenerativeAIError('Failed to perform operation', { cause: error });`? `Error` supports `cause`, and I think it negates the need for specific classes.

return;
}
controller.enqueue(parsedResponse);
currentText = currentText.substring(match[0].length);
match = currentText.match(responseLineRE);
}
return pump();
});
throw err;
});
}
},
});
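A minimal sketch of the `cause` alternative raised in the review comment above; this is the reviewer's suggestion, not what the PR implements, and it assumes an ES2022 `Error` that accepts `ErrorOptions` (the SDK's error constructors currently take only a message string):

```typescript
// Sketch only: wrap the failure once and carry the original error as `cause`.
async function readWithWrappedError<T>(
  reader: ReadableStreamDefaultReader<T>,
): Promise<ReadableStreamReadResult<T>> {
  try {
    return await reader.read();
  } catch (error) {
    throw new Error("Failed to perform operation", { cause: error });
  }
}

// A caller would then branch on the underlying reason instead of the class:
//   if ((err.cause as DOMException | undefined)?.name === "AbortError") { ... }
```

The PR instead keeps a dedicated error class, which lets callers use an `instanceof` check without inspecting `cause`.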
13 changes: 13 additions & 0 deletions test-utils/mock-response.ts
@@ -48,6 +48,19 @@ export function getChunkedStream(

return stream;
}

/**
 * Returns a stream that immediately errors with the given error.
*/
export function getErrorStream(err: Error): ReadableStream<Uint8Array> {
const stream = new ReadableStream<Uint8Array>({
start(controller) {
controller.error(err);
},
});
return stream;
}

export function getMockResponseStreaming(
filename: string,
chunkLength: number = 20,