❯ pnpm tsx src/generate-text/anthropic-cache-control.ts
Body {
  "model": "claude-3-5-sonnet-20240620",
  "max_tokens": 4096,
  "temperature": 0,
  "messages": [
    {
      "role": "user",
      "content": [
        {
          "type": "text",
          "text": "You are a JavaScript expert."
        },
        {
          "type": "text",
          "text": "Error messages: \nAPICallError [AI_APICallError]: Failed to process error response\n    at postToApi (/Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:382:15)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n  ... 4 lines matching cause stack trace ...\n    at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2723:36)\n    at async /Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:339:22\n    at async main (/Users/larsgrammel/repositories/ai/examples/ai-core/src/generate-text/anthropic-cache-control.ts:2:1351) {\n  cause: TypeError: Body is unusable\n      at consumeBody (node:internal/deps/undici/undici:4281:15)\n      at _Response.text (node:internal/deps/undici/undici:4236:18)\n      at /Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:443:39\n      at postToApi (/Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:373:34)\n      at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n      at async AnthropicMessagesLanguageModel.doGenerate (/Users/larsgrammel/repositories/ai/packages/anthropic/dist/index.js:316:50)\n      at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2748:34)\n      at async /Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:339:22\n      at async _retryWithExponentialBackoff (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:170:12)\n      at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2723:36),\n  url: 'https://api.anthropic.com/v1/messages',\n  requestBodyValues: {\n    model: 'claude-3-5-sonnet-20240620',\n    top_k: undefined,\n    max_tokens: 4096,\n    temperature: 0,\n    top_p: undefined,\n    stop_sequences: undefined,\n    system: undefined,\n    messages: [ [Object] ],\n    tools: undefined,\n    tool_choice: undefined\n  },\n  statusCode: 400,\n  responseHeaders: {\n    'cf-cache-status': 'DYNAMIC',\n    'cf-ray': '8b39b60ab8734516-TXL',\n    connection: 'keep-alive',\n    'content-length': '171',\n    'content-type': 'application/json',\n    date: 'Thu, 15 Aug 2024 14:00:28 GMT',\n    'request-id': 'req_01PLrS159iiihG7kS9PFQiqx',\n    server: 'cloudflare',\n    via: '1.1 google',\n    'x-cloud-trace-context': '1371f8e6d358102b79d109db3829d62e',\n    'x-robots-tag': 'none',\n    'x-should-retry': 'false'\n  },\n  responseBody: undefined,\n  isRetryable: false,\n  data: undefined,\n  [Symbol(vercel.ai.error)]: true,\n  [Symbol(vercel.ai.error.AI_APICallError)]: true\n}",
          "cache_control": {
            "type": "ephemeral"
          }
        },
        {
          "type": "text",
          "text": "Explain the error message."
        }
      ]
    }
  ]
}
Fetched {"type":"error","error":{"type":"invalid_request_error","message":"The message up to and including the first cache-control block must be at least 1024 tokens. Found: 939."}}

APICallError [AI_APICallError]: Failed to process error response
    at postToApi (/Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:382:15)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
  ... 4 lines matching cause stack trace ...
    at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2723:36)
    at async /Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:339:22
    at async main (/Users/larsgrammel/repositories/ai/examples/ai-core/src/generate-text/anthropic-cache-control.ts:54:361) {
  cause: TypeError: Body is unusable
      at consumeBody (node:internal/deps/undici/undici:4281:15)
      at _Response.text (node:internal/deps/undici/undici:4236:18)
      at /Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:443:39
      at postToApi (/Users/larsgrammel/repositories/ai/packages/provider-utils/dist/index.js:373:34)
      at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
      at async AnthropicMessagesLanguageModel.doGenerate (/Users/larsgrammel/repositories/ai/packages/anthropic/dist/index.js:316:50)
      at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2748:34)
      at async /Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:339:22
      at async _retryWithExponentialBackoff (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:170:12)
      at async fn (/Users/larsgrammel/repositories/ai/packages/ai/dist/index.js:2723:36),
  url: 'https://api.anthropic.com/v1/messages',
  requestBodyValues: {
    model: 'claude-3-5-sonnet-20240620',
    top_k: undefined,
    max_tokens: 4096,
    temperature: 0,
    top_p: undefined,
    stop_sequences: undefined,
    system: undefined,
    messages: [ [Object] ],
    tools: undefined,
    tool_choice: undefined
  },
  statusCode: 400,
  responseHeaders: {
    'cf-cache-status': 'DYNAMIC',
    'cf-ray': '8b39b87a8f684541-TXL',
    connection: 'keep-alive',
    'content-length': '173',
    'content-type': 'application/json',
    date: 'Thu, 15 Aug 2024 14:02:08 GMT',
    'request-id': 'req_01YZqjpifTdvLZqfwBieLs44',
    server: 'cloudflare',
    via: '1.1 google',
    'x-cloud-trace-context': '00f2b1629d0dc8c6a4714db1dbdb4c2c',
    'x-robots-tag': 'none',
    'x-should-retry': 'false'
  },
  responseBody: undefined,
  isRetryable: false,
  data: undefined,
  [Symbol(vercel.ai.error)]: true,
  [Symbol(vercel.ai.error.AI_APICallError)]: true
}
}
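What the caller actually sees is not the 400 message, though: the SDK throws "Failed to process error response" with cause: TypeError: Body is unusable and responseBody: undefined, so the real Anthropic error is lost. Undici raises that TypeError when a fetch Response body is consumed more than once, which points to the error-handling path reading the body a second time after it has already been drained. A minimal illustration of the behavior and the usual read-once / clone workaround (a sketch of the undici semantics, not the actual provider-utils code):

```ts
// Sketch: a fetch Response body can be consumed exactly once.
const response = await fetch('https://api.anthropic.com/v1/messages', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ /* request body */ }),
});

if (!response.ok) {
  const errorText = await response.text(); // body is consumed here
  // await response.text();                // reading again throws: TypeError: Body is unusable

  // Workarounds: parse the single string that was read, or clone the response
  // *before* the first read so a second consumer can still access the body.
  // const errorJson = JSON.parse(await response.clone().text());
  console.error('API error:', errorText);
}
```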