@@ -1490,6 +1490,137 @@ describe('result.pipeTextStreamToResponse', async () => {
   });
 });
 
+describe('result.toDataStream', () => {
+  it('should create a data stream', async () => {
+    const result = await streamText({
+      model: new MockLanguageModelV1({
+        doStream: async () => ({
+          stream: convertArrayToReadableStream([
+            { type: 'text-delta', textDelta: 'Hello' },
+            { type: 'text-delta', textDelta: ', ' },
+            { type: 'text-delta', textDelta: 'world!' },
+            {
+              type: 'finish',
+              finishReason: 'stop',
+              usage: { promptTokens: 3, completionTokens: 10 },
+            },
+          ]),
+          rawCall: { rawPrompt: 'prompt', rawSettings: {} },
+        }),
+      }),
+      prompt: 'test-input',
+    });
+
+    const dataStream = result.toDataStream();
+
+    expect(
+      await convertReadableStreamToArray(
+        dataStream.pipeThrough(new TextDecoderStream()),
+      ),
+    ).toEqual([
+      '0:"Hello"\n',
+      '0:", "\n',
+      '0:"world!"\n',
+      'e:{"finishReason":"stop","usage":{"promptTokens":3,"completionTokens":10}}\n',
+      'd:{"finishReason":"stop","usage":{"promptTokens":3,"completionTokens":10}}\n',
+    ]);
+  });
+
+  it('should support merging with existing stream data', async () => {
+    const result = await streamText({
+      model: new MockLanguageModelV1({
+        doStream: async () => ({
+          stream: convertArrayToReadableStream([
+            { type: 'text-delta', textDelta: 'Hello' },
+            { type: 'text-delta', textDelta: ', ' },
+            { type: 'text-delta', textDelta: 'world!' },
+            {
+              type: 'finish',
+              finishReason: 'stop',
+              usage: { promptTokens: 3, completionTokens: 10 },
+            },
+          ]),
+          rawCall: { rawPrompt: 'prompt', rawSettings: {} },
+        }),
+      }),
+      prompt: 'test-input',
+    });
+
+    const streamData = new StreamData();
+    streamData.append('stream-data-value');
+    streamData.close();
+
+    const dataStream = result.toDataStream({ data: streamData });
+
+    expect(
+      await convertReadableStreamToArray(
+        dataStream.pipeThrough(new TextDecoderStream()),
+      ),
+    ).toEqual([
+      '2:["stream-data-value"]\n',
+      '0:"Hello"\n',
+      '0:", "\n',
+      '0:"world!"\n',
+      'e:{"finishReason":"stop","usage":{"promptTokens":3,"completionTokens":10}}\n',
+      'd:{"finishReason":"stop","usage":{"promptTokens":3,"completionTokens":10}}\n',
+    ]);
+  });
+
+  it('should mask error messages by default', async () => {
+    const result = await streamText({
+      model: new MockLanguageModelV1({
+        doStream: async () => ({
+          stream: convertArrayToReadableStream([
+            { type: 'error', error: 'error' },
+          ]),
+          rawCall: { rawPrompt: 'prompt', rawSettings: {} },
+        }),
+      }),
+      prompt: 'test-input',
+    });
+
+    const dataStream = result.toDataStream();
+
+    expect(
+      await convertReadableStreamToArray(
+        dataStream.pipeThrough(new TextDecoderStream()),
+      ),
+    ).toEqual([
+      '3:""\n',
+      'e:{"finishReason":"error","usage":{"promptTokens":0,"completionTokens":0}}\n',
+      'd:{"finishReason":"error","usage":{"promptTokens":0,"completionTokens":0}}\n',
+    ]);
+  });
+
+  it('should support custom error messages', async () => {
+    const result = await streamText({
+      model: new MockLanguageModelV1({
+        doStream: async () => ({
+          stream: convertArrayToReadableStream([
+            { type: 'error', error: 'error' },
+          ]),
+          rawCall: { rawPrompt: 'prompt', rawSettings: {} },
+        }),
+      }),
+      prompt: 'test-input',
+    });
+
+    const dataStream = result.toDataStream({
+      getErrorMessage: error => `custom error message: ${error}`,
+    });
+
+    expect(
+      await convertReadableStreamToArray(
+        dataStream.pipeThrough(new TextDecoderStream()),
+      ),
+    ).toEqual([
+      '3:"custom error message: error"\n',
+      'e:{"finishReason":"error","usage":{"promptTokens":0,"completionTokens":0}}\n',
+      'd:{"finishReason":"error","usage":{"promptTokens":0,"completionTokens":0}}\n',
+    ]);
+  });
+});
+
 describe('result.toDataStreamResponse', () => {
   it('should create a Response with a data stream', async () => {
     const result = await streamText({
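The expected chunks in these tests spell out the data stream wire format that `toDataStream` emits: each part is a one-character type prefix (`0:` text delta, `2:` appended stream data, `3:` error text, `e:`/`d:` finish metadata), followed by a JSON payload and a newline. As a minimal sketch of that framing, the prefix meanings here are inferred from the expected outputs and the helper name is hypothetical, not an SDK export:

```ts
// Sketch of the `<prefix>:<JSON>\n` framing asserted in the tests above.
// Prefix meanings are inferred from the expected outputs; `formatPart`
// is a hypothetical helper, not part of the AI SDK's public API.
type PartPrefix = '0' | '2' | '3' | 'e' | 'd';

function formatPart(prefix: PartPrefix, value: unknown): string {
  return `${prefix}:${JSON.stringify(value)}\n`;
}

// Reproduces the first expected chunks from the tests:
console.log(formatPart('0', 'Hello')); // 0:"Hello"\n
console.log(formatPart('2', ['stream-data-value'])); // 2:["stream-data-value"]\n
```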