Skip to content

Commit 5d4ee2d

Browse files
committed
Trace/replay llms.internal.sendRequest
To decouple `texampleTests.m` from availability and speed of external servers, record calls to `llms.internal.sendRequest` (on dev machine) and replay (during most test runs, including CI). See tests/recordings/README.md for instructions.
1 parent 062538c commit 5d4ee2d

28 files changed

+3828
-18
lines changed

+llms/+internal/callAzureChatAPI.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@
6464

6565
parameters = buildParametersCall(messages, functions, nvp);
6666

67-
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
67+
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
6868

6969
% If call errors, "choices" will not be part of response.Body.Data, instead
7070
% we get response.Body.Data.error

+llms/+internal/callOllamaChatAPI.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353

5454
parameters = buildParametersCall(model, messages, nvp);
5555

56-
[response, streamedText] = llms.internal.sendRequest(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
56+
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
5757

5858
% If call errors, "choices" will not be part of response.Body.Data, instead
5959
% we get response.Body.Data.error

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@
6262

6363
parameters = buildParametersCall(messages, functions, nvp);
6464

65-
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
65+
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
6666

6767
% If call errors, "choices" will not be part of response.Body.Data, instead
6868
% we get response.Body.Data.error

+llms/+internal/sendRequestWrapper.m

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
function [response, streamedText] = sendRequestWrapper(varargin)
% This function is undocumented and will change in a future release

% Test seam: the production pass-through. Forwards every argument,
% unchanged, to the real llms.internal.sendRequest; test setups shadow
% this function with a recording or replaying implementation.
args = varargin;
[response, streamedText] = llms.internal.sendRequest(args{:});

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
*.env
22
*.asv
33
*.mat
4+
!tests/recordings/*.mat
45
startup.m
56
papers_to_read.csv
67
data/*

extractOpenAIEmbeddings.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@
4747
end
4848

4949

50-
response = llms.internal.sendRequest(parameters,key, END_POINT, nvp.TimeOut);
50+
response = llms.internal.sendRequestWrapper(parameters,key, END_POINT, nvp.TimeOut);
5151

5252
if isfield(response.Body.Data, "data")
5353
emb = [response.Body.Data.data.embedding];
Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
%
% Recording variant: forwards every call to llms.internal.sendRequest and
% appends the inputs and outputs to the persistent buffer seenCalls, so a
% later test run can replay them without contacting the external server.
%
% Control protocol, distinguished by argument count:
%   sendRequestWrapper("open", filename)  - remember the recording file name
%   sendRequestWrapper("close")           - save seenCalls to <filename>.mat
%                                           and reset the recording buffer
persistent seenCalls
if isempty(seenCalls)
    % Each row is {inputs, outputs}; starts empty.
    seenCalls = cell(0,2);
end

% File name (without the .mat extension) set by the "open" call.
persistent filename

if nargin == 1 && isequal(parameters,"close")
    % NOTE(review): if "close" arrives before any "open", filename is empty
    % and this save errors — assumes the harness always pairs open/close.
    save(filename+".mat","seenCalls");
    seenCalls = cell(0,2);
    return
end

if nargin==2 && isequal(parameters,"open")
    filename = token;
    return
end

% Capture the arguments of every streaming-callback invocation so the
% replay variant can reproduce the stream events in order.
streamFunCalls = {};
% varargin{3} is the optional streaming callback when five or more
% positional arguments are supplied (parameters, token, URL, timeout, fcn).
hasCallback = nargin >= 5 && isa(varargin{3},'function_handle');
if hasCallback
    streamFun = varargin{3};
end
    function wrappedStreamFun(varargin)
        % Record the callback arguments (one cell per argument, so replay's
        % cellfun can re-issue them), then forward to the real callback.
        streamFunCalls(end+1) = varargin;
        streamFun(varargin{:});
    end
if hasCallback
    % Substitute the instrumented callback before forwarding the request.
    varargin{3} = @wrappedStreamFun;
end


[response, streamedText] = llms.internal.sendRequest(parameters, token, varargin{:});

% Append one {inputs, outputs} row; the replay variant consumes rows FIFO,
% so this layout must stay in sync with the replaying sendRequestWrapper.
seenCalls(end+1,:) = {{parameters},{response,streamFunCalls,streamedText}};
end
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
function addpath(~)
% ignore addpath calls in examples
%
% Shadows the built-in addpath so example scripts executed during recorded
% test runs cannot modify the MATLAB path; the single argument is discarded.
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
%
% Replay variant: instead of calling llms.internal.sendRequest, pops the
% oldest recorded call from the persistent queue seenCalls and returns its
% stored response, re-issuing any recorded streaming-callback events.
%
% Control protocol, distinguished by argument count:
%   sendRequestWrapper("open", filename)  - load recordings from <filename>.mat
%   sendRequestWrapper("close")           - discard any remaining recordings
persistent seenCalls
if isempty(seenCalls)
    seenCalls = cell(0,2);
end

if nargin == 1 && isequal(parameters,"close")
    seenCalls = cell(0,2);
    return
end

if nargin==2 && isequal(parameters,"open")
    % Replaces the queue with the rows saved by the recording variant;
    % the .mat file must contain a variable named seenCalls.
    load(token+".mat","seenCalls");
    return
end

% Pop the oldest recorded outputs {response, streamFunCalls, streamedText}.
% NOTE(review): assumes the test issues the same requests in the same order
% as the recording; an unrecorded extra request fails on this indexing.
result = seenCalls{1,2};
response = result{1};
streamFunCalls = result{2};
streamedText = result{3};

% If the caller supplied a streaming callback (5th positional argument),
% replay each recorded callback argument against it in recorded order.
if nargin >= 5 && isa(varargin{3},'function_handle')
    streamFun = varargin{3};
    cellfun(streamFun, streamFunCalls);
end

% Remove the consumed row so the next call replays the next recording.
seenCalls(1,:) = [];
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
function addpath(~)
% ignore addpath calls in examples
%
% Shadows the built-in addpath so example scripts executed during replayed
% test runs cannot modify the MATLAB path; the single argument is discarded.

0 commit comments

Comments
 (0)