@@ -1720,314 +1720,6 @@ describe('doStream', () => {
   });
 });
 
-describe('doStream simulated streaming', () => {
-  function prepareJsonResponse({
-    content = '',
-    reasoning_content = '',
-    tool_calls,
-    usage = {
-      prompt_tokens: 4,
-      total_tokens: 34,
-      completion_tokens: 30,
-    },
-    finish_reason = 'stop',
-    id = 'chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd',
-    created = 1711115037,
-    model = 'gpt-3.5-turbo-0125',
-  }: {
-    content?: string;
-    reasoning_content?: string;
-    tool_calls?: Array<{
-      id: string;
-      type: 'function';
-      function: {
-        name: string;
-        arguments: string;
-      };
-    }>;
-    usage?: {
-      prompt_tokens?: number;
-      total_tokens?: number;
-      completion_tokens?: number;
-    };
-    finish_reason?: string;
-    created?: number;
-    id?: string;
-    model?: string;
-  } = {}) {
-    server.urls['https://my.api.com/v1/chat/completions'].response = {
-      type: 'json-value',
-      body: {
-        id,
-        object: 'chat.completion',
-        created,
-        model,
-        choices: [
-          {
-            index: 0,
-            message: {
-              role: 'assistant',
-              content,
-              tool_calls,
-              reasoning_content,
-            },
-            finish_reason,
-          },
-        ],
-        usage,
-        system_fingerprint: 'fp_3bc1b5746c',
-      },
-    };
-  }
-
-  it('should stream text delta', async () => {
-    prepareJsonResponse({ content: 'Hello, World!', model: 'o1-preview' });
-
-    const model = provider.chatModel('o1', {
-      simulateStreaming: true,
-    });
-
-    const { stream } = await model.doStream({
-      inputFormat: 'prompt',
-      prompt: TEST_PROMPT,
-    });
-
-    expect(await convertReadableStreamToArray(stream)).toMatchInlineSnapshot(`
-      [
-        {
-          "body": {
-            "choices": [
-              {
-                "finish_reason": "stop",
-                "index": 0,
-                "message": {
-                  "content": "Hello, World!",
-                  "reasoning_content": "",
-                  "role": "assistant",
-                },
-              },
-            ],
-            "created": 1711115037,
-            "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-            "model": "o1-preview",
-            "object": "chat.completion",
-            "system_fingerprint": "fp_3bc1b5746c",
-            "usage": {
-              "completion_tokens": 30,
-              "prompt_tokens": 4,
-              "total_tokens": 34,
-            },
-          },
-          "headers": {
-            "content-length": "349",
-            "content-type": "application/json",
-          },
-          "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-          "modelId": "o1-preview",
-          "timestamp": 2024-03-22T13:43:57.000Z,
-          "type": "response-metadata",
-        },
-        {
-          "textDelta": "Hello, World!",
-          "type": "text-delta",
-        },
-        {
-          "finishReason": "stop",
-          "logprobs": undefined,
-          "providerMetadata": {
-            "test-provider": {},
-          },
-          "type": "finish",
-          "usage": {
-            "completionTokens": 30,
-            "promptTokens": 4,
-          },
-        },
-      ]
-    `);
-  });
-
-  it('should stream reasoning content before text delta in simulated streaming', async () => {
-    prepareJsonResponse({
-      content: 'Hello, World!',
-      reasoning_content: 'This is the reasoning',
-      model: 'o1-preview',
-    });
-
-    const model = provider.chatModel('o1', {
-      simulateStreaming: true,
-    });
-
-    const { stream } = await model.doStream({
-      inputFormat: 'prompt',
-      prompt: TEST_PROMPT,
-    });
-
-    expect(await convertReadableStreamToArray(stream)).toMatchInlineSnapshot(`
-      [
-        {
-          "body": {
-            "choices": [
-              {
-                "finish_reason": "stop",
-                "index": 0,
-                "message": {
-                  "content": "Hello, World!",
-                  "reasoning_content": "This is the reasoning",
-                  "role": "assistant",
-                },
-              },
-            ],
-            "created": 1711115037,
-            "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-            "model": "o1-preview",
-            "object": "chat.completion",
-            "system_fingerprint": "fp_3bc1b5746c",
-            "usage": {
-              "completion_tokens": 30,
-              "prompt_tokens": 4,
-              "total_tokens": 34,
-            },
-          },
-          "headers": {
-            "content-length": "370",
-            "content-type": "application/json",
-          },
-          "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-          "modelId": "o1-preview",
-          "timestamp": 2024-03-22T13:43:57.000Z,
-          "type": "response-metadata",
-        },
-        {
-          "textDelta": "This is the reasoning",
-          "type": "reasoning",
-        },
-        {
-          "textDelta": "Hello, World!",
-          "type": "text-delta",
-        },
-        {
-          "finishReason": "stop",
-          "logprobs": undefined,
-          "providerMetadata": {
-            "test-provider": {},
-          },
-          "type": "finish",
-          "usage": {
-            "completionTokens": 30,
-            "promptTokens": 4,
-          },
-        },
-      ]
-    `);
-  });
-
-  it('should stream tool calls', async () => {
-    prepareJsonResponse({
-      model: 'o1-preview',
-      tool_calls: [
-        {
-          id: 'call_O17Uplv4lJvD6DVdIvFFeRMw',
-          type: 'function',
-          function: {
-            name: 'test-tool',
-            arguments: '{"value":"Sparkle Day"}',
-          },
-        },
-      ],
-    });
-
-    const model = provider.chatModel('o1', {
-      simulateStreaming: true,
-    });
-
-    const { stream } = await model.doStream({
-      inputFormat: 'prompt',
-      tools: [
-        {
-          type: 'function',
-          name: 'test-tool',
-          parameters: {
-            type: 'object',
-            properties: { value: { type: 'string' } },
-            required: ['value'],
-            additionalProperties: false,
-            $schema: 'http://json-schema.org/draft-07/schema#',
-          },
-        },
-      ],
-      prompt: TEST_PROMPT,
-    });
-
-    expect(await convertReadableStreamToArray(stream)).toMatchInlineSnapshot(`
-      [
-        {
-          "body": {
-            "choices": [
-              {
-                "finish_reason": "stop",
-                "index": 0,
-                "message": {
-                  "content": "",
-                  "reasoning_content": "",
-                  "role": "assistant",
-                  "tool_calls": [
-                    {
-                      "function": {
-                        "arguments": "{"value":"Sparkle Day"}",
-                        "name": "test-tool",
-                      },
-                      "id": "call_O17Uplv4lJvD6DVdIvFFeRMw",
-                      "type": "function",
-                    },
-                  ],
-                },
-              },
-            ],
-            "created": 1711115037,
-            "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-            "model": "o1-preview",
-            "object": "chat.completion",
-            "system_fingerprint": "fp_3bc1b5746c",
-            "usage": {
-              "completion_tokens": 30,
-              "prompt_tokens": 4,
-              "total_tokens": 34,
-            },
-          },
-          "headers": {
-            "content-length": "482",
-            "content-type": "application/json",
-          },
-          "id": "chatcmpl-95ZTZkhr0mHNKqerQfiwkuox3PHAd",
-          "modelId": "o1-preview",
-          "timestamp": 2024-03-22T13:43:57.000Z,
-          "type": "response-metadata",
-        },
-        {
-          "args": "{"value":"Sparkle Day"}",
-          "toolCallId": "call_O17Uplv4lJvD6DVdIvFFeRMw",
-          "toolCallType": "function",
-          "toolName": "test-tool",
-          "type": "tool-call",
-        },
-        {
-          "finishReason": "stop",
-          "logprobs": undefined,
-          "providerMetadata": {
-            "test-provider": {},
-          },
-          "type": "finish",
-          "usage": {
-            "completionTokens": 30,
-            "promptTokens": 4,
-          },
-        },
-      ]
-    `);
-  });
-});
-
 describe('metadata extraction', () => {
   const testMetadataExtractor = {
     extractMetadata: ({ parsedBody }: { parsedBody: unknown }) => {