Skip to content

Commit 3cb16c7

Browse files
committed
Refactor proxy response handling to check for application calls instead of LLM requests
1 parent 7277e48 commit 3cb16c7

File tree

1 file changed

+7
-7
lines changed

1 file changed

+7
-7
lines changed

endpoints/helpers/endpoint.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,9 @@ def proxy_response(
1616
data: httpx._types.RequestData | None,
1717
files: httpx._types.RequestFiles | None,
1818
) -> werkzeug.Response:
19-
is_llm, is_stream = check_llm_streaming_request(request)
19+
is_app_call, is_stream = check_app_streaming_request(request)
2020

21-
if is_llm and is_stream:
21+
if is_app_call and is_stream:
2222
return proxy_stream_response(
2323
method=method,
2424
url=url,
@@ -29,7 +29,7 @@ def proxy_response(
2929
files=files,
3030
timeout=httpx.Timeout(None, read=300, write=10),
3131
)
32-
elif is_llm:
32+
elif is_app_call and not is_stream:
3333
return proxy_blocking_response(
3434
method=method,
3535
url=url,
@@ -116,19 +116,19 @@ def OidcApiProxyErrorResponse(message: str, error_code: int = 500) -> werkzeug.R
116116
)
117117

118118

def check_app_streaming_request(request: werkzeug.Request) -> Tuple[bool, bool]:
    """Classify an incoming proxied request.

    Returns a ``(is_app_call, is_stream)`` pair:
      * ``is_app_call`` — True when the request is a POST to one of the
        app-invocation endpoints (``/chat-messages`` or ``/workflows/run``).
      * ``is_stream`` — True when, additionally, the JSON body selects
        ``"response_mode": "streaming"``.
    """
    is_app_call = False
    is_stream = False

    # Only POSTs to the app-invocation endpoints count as application calls;
    # everything else is proxied as a plain blocking request.
    if request.method.lower() == "post" and request.path in ("/chat-messages", "/workflows/run"):
        is_app_call = True
        if request.is_json:
            payload = request.get_json()
            # The caller opts into streaming via "response_mode": "streaming".
            is_stream = str(payload.get("response_mode", "")).lower() == "streaming"

    return is_app_call, is_stream
132132

133133

134134
def replace_user_params(

0 commit comments

Comments (0)