diff --git a/changedetectionio/blueprint/settings/templates/settings.html b/changedetectionio/blueprint/settings/templates/settings.html
index 150eb35e..5e268fd5 100644
--- a/changedetectionio/blueprint/settings/templates/settings.html
+++ b/changedetectionio/blueprint/settings/templates/settings.html
@@ -264,17 +264,20 @@ nav
+
New: click here (link to the changedetection.io tutorial page) to find out how to set this up, with an example
+
+ Key fields should use a password-type input so you can see that a key is set, but the value is neither revealed on view nor lost on save
+
{{ render_simple_field(form.application.form.ai.form.LLM_backend) }}
Preferred LLM connection
-
- {{ render_checkbox_field(form.application.form.ai.form.openai_key) }}
+ {{ render_checkbox_field(form.application.form.ai.form.API_keys.form.openai) }}
Go here to read more about OpenAI integration
- {{ render_checkbox_field(form.application.form.ai.form.gemini_key) }}
+ {{ render_checkbox_field(form.application.form.ai.form.API_keys.form.gemini) }}
Go here to read more about Google Gemini integration
diff --git a/changedetectionio/blueprint/ui/views.py b/changedetectionio/blueprint/ui/views.py
index d9ae5052..d7cd23e3 100644
--- a/changedetectionio/blueprint/ui/views.py
+++ b/changedetectionio/blueprint/ui/views.py
@@ -212,7 +212,14 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
- new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})
+ extras = {'paused': add_paused, 'processor': processor}
+
+ LLM_prompt = request.form.get('LLM_prompt', '').strip()
+ if LLM_prompt:
+ extras['LLM_prompt'] = LLM_prompt
+ extras['LLM_send_type'] = request.form.get('LLM_send_type', 'text')
+
+ new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras=extras)
if new_uuid:
if add_paused:
diff --git a/changedetectionio/forms.py b/changedetectionio/forms.py
index 4658b2ac..8c0ca435 100644
--- a/changedetectionio/forms.py
+++ b/changedetectionio/forms.py
@@ -761,6 +761,17 @@ class globalSettingsApplicationUIForm(Form):
socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])
favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()])
+class globalSettingsApplicationAIKeysForm(Form):
+
+ openai = StringField('OpenAI Key',
+ validators=[validators.Optional()],
+ render_kw={"placeholder": 'xxxxxxxxx'}
+ )
+ gemini = StringField('Google Gemini Key',
+ validators=[validators.Optional()],
+ render_kw={"placeholder": 'ooooooooo'}
+ )
+
class globalSettingsApplicationAIForm(Form):
#@todo use only configured types?
@@ -768,14 +779,9 @@ class globalSettingsApplicationAIForm(Form):
choices=[('openai', 'Open AI'), ('gemini', 'Gemini')],
default="text")
- openai_key = StringField('OpenAI Key',
- validators=[validators.Optional()],
- render_kw={"placeholder": 'xxxxxxxxx'}
- )
- gemini_key = StringField('Google Gemini Key',
- validators=[validators.Optional()],
- render_kw={"placeholder": 'ooooooooo'}
- )
+ # So that we can pass this to our LLM/__init__.py as a keys dict
+ API_keys = FormField(globalSettingsApplicationAIKeysForm)
+
# datastore.data['settings']['application']..
diff --git a/changedetectionio/processors/LLM/__init__.py b/changedetectionio/processors/LLM/__init__.py
new file mode 100644
index 00000000..9081bcce
--- /dev/null
+++ b/changedetectionio/processors/LLM/__init__.py
@@ -0,0 +1,64 @@
+import importlib
+from langchain_core.messages import SystemMessage, HumanMessage
+
+SYSTEM_MESSAGE = (
+ "You are a text analyser who will attempt to give the most concise information "
+ "to the request, the information should be returned in a way that if I ask you again "
+ "I should get the same answer if the outcome is the same. The goal is to cut down "
+ "or reduce the text changes from you when i ask the same question about similar content "
+ "Always list items in exactly the same order and wording as found in the source text. "
+)
+
+
+class LLM_integrate:
+ PROVIDER_MAP = {
+ "openai": ("langchain_openai", "ChatOpenAI"),
+ "azure": ("langchain_community.chat_models", "AzureChatOpenAI"),
+ "gemini": ("langchain_google_genai", "ChatGoogleGenerativeAI")
+ }
+
+ def __init__(self, api_keys: dict):
+ """
+ api_keys = {
+ "openai": "sk-xxx",
+ "azure": "AZURE_KEY",
+ "gemini": "GEMINI_KEY"
+ }
+ """
+ self.api_keys = api_keys
+
+ def run(self, provider: str, model: str, message: str):
+ module_name, class_name = self.PROVIDER_MAP[provider]
+
+ # Import the class dynamically
+ module = importlib.import_module(module_name)
+ LLMClass = getattr(module, class_name)
+
+ # Create the LLM object
+ llm_kwargs = {}
+ if provider == "openai":
+ llm_kwargs = dict(api_key=self.api_keys.get("openai", ''),
+ model=model,
+ # https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.base.ChatOpenAI.html#langchain_openai.chat_models.base.ChatOpenAI.temperature
+ temperature=0 # most deterministic,
+ )
+ elif provider == "azure":
+ llm_kwargs = dict(
+ api_key=self.api_keys["azure"],
+ azure_endpoint="https://