Coverage for app/controllers/ai.py: 66%
29 statements
coverage.py v7.6.12, created at 2025-05-02 02:49 +0000
from openai import OpenAI
from flask import current_app, g
from enum import Enum
from werkzeug.local import LocalProxy
import google.generativeai as genai

OLLAMA_URL = "http://localhost:11434/api/generate"
DEFAULT_MODEL_NAME = "codellama:7b"
# Payload used to create the customized Ollama model:
# {
#     "name": "copilot-style-codellama",
#     "from": "codellama",
#     "system": "You are an AI assistant that provides code completions similar to GitHub Copilot. However, when specified you will need to respond with small errors that are left for the user to detect. Never point these errors out in any way. Your responses should be concise and continue the user's code seamlessly. Please follow all of the commands when giving completions. SYSTEM: Only respond with the code following the prompt, SYSTEM: Avoid pointing out mistakes, SYSTEM: Respond in only plain text, SYSTEM: Avoid including text describing or explaining the code mistake at all, SYSTEM: Provide comments only where necessary, SYSTEM: Avoid providing additional information, SYSTEM: If the prompt asks for an error, introduce a small syntax or logic mistake in the code. Do not explain or provide any extra context.",
#     "parameters": {
#         "temperature": 0.2,
#         "top_k": 50
#     }
# }
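
# A rough Modelfile equivalent of the payload above (an illustrative sketch,
# assuming the standard `ollama create` workflow; the file name is arbitrary):
#
#   FROM codellama
#   SYSTEM """You are an AI assistant that provides code completions similar
#   to GitHub Copilot. ..."""
#   PARAMETER temperature 0.2
#   PARAMETER top_k 50
#
# Built with: ollama create copilot-style-codellama -f Modelfile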

commands = [
    "SYSTEM: Only respond with the code following the prompt.",
    "SYSTEM: Avoid pointing out mistakes.",
    "SYSTEM: Respond in only plain text.",
    "SYSTEM: Avoid including text describing or explaining the code mistake at all.",
    "SYSTEM: Provide comments only where necessary.",
    "SYSTEM: Avoid providing additional information.",
    "SYSTEM: If the prompt asks for an error, introduce a small syntax or logic mistake in the code. Do not explain or provide any extra context.",
]

good_command = "SYSTEM: Complete the following code:"
bad_command = "SYSTEM: Complete the following code but introduce a small syntax or logic mistake:"
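
# Illustrative sketch (not part of the original module) of how these prompt
# strings could be combined into a request against OLLAMA_URL. The payload
# fields follow the Ollama /api/generate JSON API; the helper name and the
# exact field set are assumptions to verify against the Ollama docs.
def _example_ollama_completion(code_snippet, introduce_error=False):
    import requests  # imported locally so the sketch stays self-contained

    prompt_header = bad_command if introduce_error else good_command
    payload = {
        "model": DEFAULT_MODEL_NAME,
        "system": " ".join(commands),
        "prompt": f"{prompt_header}\n{code_snippet}",
        "stream": False,  # return a single JSON object instead of a stream
    }
    response = requests.post(OLLAMA_URL, json=payload, timeout=60)
    response.raise_for_status()
    # Non-streaming responses carry the generated text under "response".
    return response.json()["response"]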

# Supported model vendors.
class vendors(Enum):
    OpenAI = "openai"
    Ollama = "ollama"
    Google = "google"

# Default sampling parameters for completion requests.
default_openai_parameters = {
    "temperature": 0.2,
    "top_p": 1,
    "top_k": 1,
    "max_tokens": 256,
}
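
# Illustrative sketch (not part of the original module) showing how these
# defaults could be passed to a chat completion. Note that OpenAI's Chat
# Completions API does not accept "top_k", so the sketch drops it; the model
# name and helper name are assumptions for illustration only.
def _example_openai_completion(client, code_snippet):
    params = {k: v for k, v in default_openai_parameters.items() if k != "top_k"}
    response = client.chat.completions.create(
        model="gpt-4o-mini",  # assumed model name, not taken from this module
        messages=[
            {"role": "system", "content": " ".join(commands)},
            {"role": "user", "content": f"{good_command}\n{code_snippet}"},
        ],
        **params,
    )
    return response.choices[0].message.content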

def get_openai():
    # Lazily create the OpenAI client and cache it on the current app context.
    if "ai" not in g:
        g.ai = OpenAI(api_key=current_app.config["OPENAI_API_KEY"])
    return g.ai

def get_gemini():
    # Lazily configure Gemini and cache the model, along with an open chat
    # session, on the current app context.
    if "gemini" not in g:
        genai.configure(api_key=current_app.config["GEMINI_API_KEY"])
        # Create the model
        generation_config = {
            "temperature": 0.7,
            "response_mime_type": "application/json",
        }
        g.gemini = genai.GenerativeModel(
            model_name="gemini-2.0-flash",
            generation_config=generation_config,
        )
        g.gemini.chat_session = g.gemini.start_chat(history=[])
    return g.gemini
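
# Illustrative sketch (not part of the original module): once get_gemini has
# run, the cached chat session can be reused for follow-up prompts within the
# same app context. send_message/.text follow the google.generativeai
# ChatSession API; the helper name is hypothetical.
def _example_gemini_prompt(prompt_text):
    model = get_gemini()
    response = model.chat_session.send_message(prompt_text)
    # response_mime_type above requests JSON, so .text holds a JSON string.
    return response.text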

# Request-scoped proxies: the underlying client is constructed lazily on
# first access within an application context.
openai_client = LocalProxy(get_openai)
gemini_client = LocalProxy(get_gemini)
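
# Hypothetical usage from another controller (comment-only illustration):
#
#   from app.controllers.ai import gemini_client, openai_client
#
#   reply = gemini_client.chat_session.send_message("Complete: def add(a, b):")
#
# Each proxy resolves on first access inside an app/request context, so no
# client is constructed at import time.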