from semantic_kernel.functions.kernel_function_decorator import kernel_function
from semantic_kernel.kernel import Kernel
import pymssql
-from dotenv import load_dotenv
-load_dotenv()
# Azure Function App
app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)


-# Set up logging
-logger = logging.getLogger(__name__)
-
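# Azure OpenAI connection settings are read from environment variables / app settings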
endpoint = os.environ.get("AZURE_OPEN_AI_ENDPOINT")
api_key = os.environ.get("AZURE_OPEN_AI_API_KEY")
api_version = os.environ.get("OPENAI_API_VERSION")
@@ -41,8 +36,6 @@ class ChatWithDataPlugin:
    @kernel_function(name="Greeting", description="Respond to any greeting or general questions")
    def greeting(self, input: Annotated[str, "the question"]) -> Annotated[str, "The output is a string"]:

-        logger.info("Kernel Function - Greeting Initited")
-
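        # The input arrives as "<question>:::<clientId>"; keep only the question text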
        query = input.split(':::')[0]
        endpoint = os.environ.get("AZURE_OPEN_AI_ENDPOINT")
        api_key = os.environ.get("AZURE_OPEN_AI_API_KEY")
@@ -81,8 +74,6 @@ def get_SQL_Response(
        endpoint = os.environ.get("AZURE_OPEN_AI_ENDPOINT")
        api_key = os.environ.get("AZURE_OPEN_AI_API_KEY")

-        logger.info("Kernel Function - get_SQL_Response Initited")
-

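        # Create the Azure OpenAI client used by get_SQL_Response to generate the SQL response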
        client = openai.AzureOpenAI(
            azure_endpoint=endpoint,
@@ -171,8 +162,6 @@ def get_answers_from_calltranscripts(
        search_key = os.environ.get("AZURE_AI_SEARCH_API_KEY")
        index_name = os.environ.get("AZURE_SEARCH_INDEX")

-        logger.info("Kernel Function - ChatWithCallTranscripts Initited")
-
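        # Create the Azure OpenAI client; the search key and index above point at the call-transcript search index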
        client = openai.AzureOpenAI(
            azure_endpoint=endpoint,  # f"{endpoint}/openai/deployments/{deployment}/extensions",
            api_key=apikey,
@@ -253,7 +242,6 @@ async def stream_processor(response):
@app.route(route="stream_openai_text", methods=[func.HttpMethod.GET])
async def stream_openai_text(req: Request) -> StreamingResponse:

-    logger.info("Hit 1: stream_openai_text api initiated")
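    # The user question arrives as the "query" query-string parameter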
    query = req.query_params.get("query", None)

    if not query:
@@ -272,8 +260,6 @@ async def stream_openai_text(req: Request) -> StreamingResponse:
        deployment_name=deployment
    )

-    logger.info("Hit 2: stream_openai_text api - AzureChatCompletion completed")
-

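    # Register the AzureChatCompletion service with the Semantic Kernel instance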
    kernel.add_service(ai_service)

@@ -289,9 +275,6 @@ async def stream_openai_text(req: Request) -> StreamingResponse:
    settings.max_tokens = 800
    settings.temperature = 0

-    logger.info("Hit 3: stream_openai_text api - settings completed")
-
-
    # Read the HTML file
    with open("table.html", "r") as file:
        html_content = file.read()
@@ -313,16 +296,11 @@ async def stream_openai_text(req: Request) -> StreamingResponse:
    user_query_prompt = f'''{user_query}. Always send clientId as {user_query.split(':::')[-1]}'''
    query_prompt = f'''<message role="system">{system_message}</message><message role="user">{user_query_prompt}</message>'''

-    logger.info("Hit 4: stream_openai_text api - Before kernel invoke")
-
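    # Invoke the combined system/user prompt with streaming so tokens are produced as they arrive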
    sk_response = kernel.invoke_prompt_stream(
        function_name="prompt_test",
        plugin_name="weather_test",
        prompt=query_prompt,
        settings=settings
    )

-    logger.info("Hit 5: stream_openai_text api - After kernel invoke")
-
-
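    # Wrap the streamed kernel output as a server-sent event response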
    return StreamingResponse(stream_processor(sk_response), media_type="text/event-stream")