-from flask import Flask, send_from_directory, request, jsonify
+from flask import Flask, render_template, jsonify, request
 import requests
-from werkzeug.exceptions import NotFound
 from ollamaClient import OllamaClient
 
-# Constants
-TEMPLATE_FOLDER = "./templates"
-
 # Initialize Flask app and Ollama client
 app = Flask(__name__)
 ai = OllamaClient()
 # Route to serve the main index page
 @app.route("/")
 def index():
-    return send_from_directory(directory=TEMPLATE_FOLDER, path="index.html")
-
+    return render_template("index.html"), 200
 
-# Route to handle other static pages
-@app.route("/<path:name>")
-def page_handler(name):
-    try:
-        return send_from_directory(directory=TEMPLATE_FOLDER, path=name)
-    except NotFound:
-        return send_from_directory(directory=TEMPLATE_FOLDER, path="404.html"), 404
 
+@app.errorhandler(Exception)
+def error_page(error):
+    if hasattr(error, "code") and error.code == 404:
+        return render_template("404.html"), 404
 
-# Custom 404 error handler
-@app.errorhandler(404)
-def page_not_found(error):
-    return send_from_directory(directory=TEMPLATE_FOLDER, path="404.html"), 404
+    return (
+        render_template("error.html", error=str(error)),
+        500,
+    )  # Return 500 for general errors
 
 
 # API endpoint to check connection to the Ollama server
 @app.route("/api/connection")
 def api_connection():
     try:
-        # Attempt to connect to the Ollama server
         res = requests.get("http://localhost:11434", timeout=5)
         status_code = res.status_code
     except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
         print("[LOG] ERROR: UNABLE TO CONNECT TO SERVER")
         status_code = 404
 
-    # Return appropriate response based on connection status
     if status_code == 200:
         return jsonify({"host": "localhost", "port": 11434, "status": "OK"}), 200
     else:
@@ -53,14 +43,12 @@ def api_connection():
 # API endpoint to get connection stats (available and active models)
 @app.route("/api/connection/stats")
 def api_connection_stats():
-    list_of_models = ai.listModels()
-    list_of_active_models = ai.listActiveModels()
+    list_of_models = ai.list_models()
+    list_of_active_models = ai.list_active_models()
 
-    # Handle cases where no models are available or active
     if not list_of_models and not list_of_active_models:
         return jsonify({"error": "No models found"}), 404
 
-    # Prepare response data
     data = {
         "available": list_of_models,
         "active": list_of_active_models,
@@ -69,22 +57,24 @@ def api_connection_stats():
     return jsonify(data), 200
 
 
-# API endpoint to handle chat requests
 @app.route("/api/chat", methods=["POST"])
 def api_chat():
-    data = request.get_json()
+    # Get JSON data from the request (empty dict if the body is missing or not valid JSON)
+    data = request.get_json(silent=True) or {}
+    prompt = data.get("prompt")
+    model = data.get("model")
 
-    # Validate required fields in the request
-    if not data or "prompt" not in data or "model" not in data:
-        return jsonify({"error": "Missing 'prompt' or 'model' in request"}), 400
+    if not prompt:
+        return jsonify({"error": "Missing 'prompt' in request"}), 400
+    if not model:
+        return jsonify({"error": "Missing 'model' in request"}), 400
+
+    # Forward the prompt and model to the Ollama client
+    res = ai.chat(prompt=prompt, model=model)
+
+    # Return the response as JSON
+    return jsonify({"response": res}), 200
 
-    try:
-        # Send the chat request to the Ollama client
-        res = ai.chat(prompt=data.get("prompt"), model=data.get("model"))
-        return jsonify(res), 200
-    except Exception as e:
-        print(f"[LOG] ERROR: {e}")
-        return jsonify({"error": "Failed to process chat request"}), 500
 
 if __name__ == "__main__":
-    app.run()
+    app.run(debug=True)  # Set debug=True for development
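
For reference, a quick way to exercise the reworked /api/chat endpoint from a separate script. This is a minimal sketch, assuming the Flask dev server is running on its default http://127.0.0.1:5000 and that the model name used here ("llama3") is pulled locally; adjust both to your setup.

import requests

# Ask the Flask app to relay a prompt to the Ollama client.
payload = {"prompt": "Why is the sky blue?", "model": "llama3"}  # model name is an example
resp = requests.post("http://127.0.0.1:5000/api/chat", json=payload, timeout=60)

if resp.status_code == 200:
    print(resp.json()["response"])  # the endpoint wraps the reply in {"response": ...}
else:
    print(resp.status_code, resp.json().get("error"))  # 400 if 'prompt' or 'model' is missing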
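The ollamaClient module itself is not part of this diff. For context, here is a minimal sketch of the interface the app relies on (list_models(), list_active_models(), chat(prompt=..., model=...)), assuming it wraps the standard Ollama REST endpoints /api/tags, /api/ps and /api/generate on localhost:11434; the actual ollamaClient.py in the repo may be implemented differently.

import requests

OLLAMA_URL = "http://localhost:11434"  # same host/port the app's /api/connection check uses


class OllamaClient:
    def list_models(self):
        # /api/tags returns the models available locally
        res = requests.get(f"{OLLAMA_URL}/api/tags", timeout=5)
        return [m["name"] for m in res.json().get("models", [])]

    def list_active_models(self):
        # /api/ps returns the models currently loaded in memory
        res = requests.get(f"{OLLAMA_URL}/api/ps", timeout=5)
        return [m["name"] for m in res.json().get("models", [])]

    def chat(self, prompt, model):
        # /api/generate runs a single non-streaming completion
        res = requests.post(
            f"{OLLAMA_URL}/api/generate",
            json={"model": model, "prompt": prompt, "stream": False},
            timeout=120,
        )
        return res.json().get("response", "")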