Upload current progress
This commit is contained in:
1
obsidian_automator/__init__.py
Normal file
1
obsidian_automator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Package initialization
|
||||
122
obsidian_automator/agent.py
Normal file
122
obsidian_automator/agent.py
Normal file
@@ -0,0 +1,122 @@
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
import os
|
||||
import time
|
||||
from .note_server import NoteServer
|
||||
|
||||
class ObsidianAgent:
    """LLM-driven vault organizer.

    Uses the Gemini API with forced function-calling to either move a note
    into a vault folder or flag it for rewrite, according to a user-supplied
    vault "philosophy". Storage access goes through a NoteServer backend.
    """

    def __init__(self, api_key: str, server: NoteServer):
        # NoteServer abstracts the storage backend (filesystem or CouchDB).
        self.client = genai.Client(api_key=api_key)
        self.server = server
        self.model_id = "gemini-3-flash-preview"

    def process_vault(self, input_folder: str, philosophy: str, rewrite_tag: str) -> list[str]:
        """
        Scans the input folder and processes each note using the LLM.
        Returns a list of action logs.
        """
        logs = []
        notes = self.server.list_notes(input_folder)

        # Derive Vault Root (assuming input_folder is inside the vault,
        # e.g. .../Vault/Inbox): take the parent directory of the input folder.
        vault_root = os.path.dirname(input_folder.rstrip(os.sep))

        if not notes:
            return ["No notes found in input folder."]

        logs.append(f"Found {len(notes)} notes. Processing (Slow mode for Rate Limits)...")

        for i, note_path in enumerate(notes):
            # RATE LIMITING: Sleep to respect 5 RPM (1 request every 12s + buffer)
            if i > 0:
                time.sleep(15)

            content = self.server.read_note(note_path)
            if not content:
                continue

            log = self._process_single_note_with_retry(note_path, content, philosophy, rewrite_tag, vault_root)
            logs.append(log)

        return logs

    def _process_single_note_with_retry(self, note_path: str, content: str, philosophy: str, rewrite_tag: str, vault_root: str) -> str:
        """Wraps processing with retry logic for 429 errors.

        Retries up to 3 times with linear backoff (60s, 120s) on rate-limit
        errors; any other exception is reported as a log string, not raised.
        """
        max_retries = 3
        for attempt in range(max_retries):
            try:
                return self._process_single_note(note_path, content, philosophy, rewrite_tag, vault_root)
            except Exception as e:
                # Gemini rate-limit failures surface as 429 / RESOURCE_EXHAUSTED.
                if "429" in str(e) or "RESOURCE_EXHAUSTED" in str(e):
                    if attempt < max_retries - 1:
                        wait_time = 60 * (attempt + 1)
                        print(f"Rate limit hit. Waiting {wait_time}s...")
                        time.sleep(wait_time)
                        continue
                return f"Error processing {os.path.basename(note_path)}: {str(e)}"
        return f"Failed after retries: {os.path.basename(note_path)}"

    def _process_single_note(self, note_path: str, content: str, philosophy: str, rewrite_tag: str, vault_root: str) -> str:
        """Classify one note via Gemini and execute the tool call it returns."""

        # 1. Define Tools (closures capture note_path / vault_root / rewrite_tag)
        def move_note(target_folder: str):
            """Moves the note to a folder relative to Vault Root. E.g. 'Science/Biology'."""
            # Ensure we don't treat target_folder as absolute unless it starts with vault_root
            if not target_folder.startswith(vault_root):
                full_target = os.path.join(vault_root, target_folder.strip(os.sep))
            else:
                full_target = target_folder

            return self.server.move_note(note_path, full_target)

        def flag_rewrite(reason: str):
            """Flags the current note for rewrite by appending a tag and reason."""
            return self.server.flag_rewrite(note_path, reason, rewrite_tag)

        # 2. Construct Prompt
        # BUGFIX: the truncation note used to sit INSIDE the f-string and was
        # therefore sent to the model as prompt text. Content is truncated to
        # 10k chars to avoid a massive context if the note is huge.
        prompt = f"""
You are an expert Knowledge Manager for an Obsidian Vault.

PHILOSOPHY:
{philosophy}

CURRENT NOTE CONTENT:
{content[:10000]}

TASK:
Analyze the note content against the Philosophy.
1. If it fits well into a specific folder in the vault structure, move it there.
2. If it is low quality, incomplete, or violates the philosophy, flag it for rewrite.

You MUST call a tool.
"""

        # 3. Call Gemini
        response = self.client.models.generate_content(
            model=self.model_id,
            contents=prompt,
            config=types.GenerateContentConfig(
                tools=[move_note, flag_rewrite],
                tool_config=types.ToolConfig(
                    function_calling_config=types.FunctionCallingConfig(
                        mode="ANY"  # Force the model to use a tool
                    )
                )
            )
        )

        # 4. Execute Tool
        if not response.function_calls:
            return f"Skipped: {os.path.basename(note_path)} (Model failed to call tool)"

        # Execute the first function call found
        fc = response.function_calls[0]
        if fc.name == "move_note":
            result = move_note(**fc.args)
            return f"[AI] {result}"
        elif fc.name == "flag_rewrite":
            result = flag_rewrite(**fc.args)
            return f"[AI] {result}"
        else:
            return f"Error: Unknown tool called {fc.name}"
|
||||
58
obsidian_automator/components/layout.py
Normal file
58
obsidian_automator/components/layout.py
Normal file
@@ -0,0 +1,58 @@
|
||||
import reflex as rx
|
||||
from ..state.auth import AuthState
|
||||
|
||||
def sidebar_item(text: str, icon: str, href: str):
    """Render one sidebar navigation entry: icon + label, linking to *href*."""
    # Icon/label row with a hover highlight.
    row = rx.hstack(
        rx.icon(icon, size=18),
        rx.text(text, size="3", weight="medium"),
        spacing="3",
        align_items="center",
        padding="12px",
        border_radius="8px",
        _hover={"bg": "var(--gray-3)"},
        color="var(--gray-11)",
    )
    return rx.link(row, href=href, width="100%", text_decoration="none")
|
||||
|
||||
def dashboard_layout(content: rx.Component):
    """Two-column app shell: fixed 250px sidebar plus scrollable main area.

    *content* is rendered in the main area; navigation and the logout button
    (wired to AuthState.logout) live in the sidebar.
    """
    return rx.flex(
        # Sidebar
        rx.vstack(
            rx.heading("Obsidian Automator", size="4", margin_bottom="6", color="var(--accent-9)"),

            sidebar_item("Dashboard", "layout-dashboard", "/"),
            sidebar_item("Settings", "settings", "/settings"),

            # Push the logout button to the bottom of the column.
            rx.spacer(),

            rx.button(
                rx.hstack(rx.icon("log-out"), rx.text("Logout")),
                on_click=AuthState.logout,
                variant="ghost",
                width="100%",
                justify_content="start"
            ),

            width="250px",
            height="100vh",
            padding="6",
            bg="var(--gray-2)",
            border_right="1px solid var(--gray-4)",
        ),

        # Main Content
        rx.box(
            content,
            flex="1",
            height="100vh",
            bg="var(--gray-1)",
            padding="8",
            overflow="auto",
        ),
        width="100vw",
        height="100vh",
    )
|
||||
50
obsidian_automator/components/terminal.py
Normal file
50
obsidian_automator/components/terminal.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import reflex as rx
|
||||
|
||||
def terminal_window(logs: list[str], title="System Output"):
    """Render *logs* as a read-only terminal-style card.

    Shows a header bar (icon, title, decorative traffic-light dots) above a
    fixed-height scroll area that prints each log line in a monospace font.
    """
    return rx.card(
        rx.vstack(
            rx.hstack(
                rx.icon("terminal", size=18, color="var(--gray-11)"),
                rx.text(title, size="2", weight="bold", color="var(--gray-11)"),
                rx.spacer(),
                # Minimal header controls (visual only for now)
                rx.box(width="10px", height="10px", bg="var(--red-9)", border_radius="50%"),
                rx.box(width="10px", height="10px", bg="var(--yellow-9)", border_radius="50%"),
                rx.box(width="10px", height="10px", bg="var(--green-9)", border_radius="50%"),
                align_items="center",
                width="100%",
                padding_bottom="3",
                border_bottom="1px solid var(--gray-4)"
            ),
            rx.scroll_area(
                rx.vstack(
                    # logs is a state var, so iteration must go through rx.foreach.
                    rx.foreach(
                        logs,
                        lambda log: rx.box(
                            rx.text(
                                log,
                                font_family="JetBrains Mono, monospace",
                                font_size="13px",
                                color="var(--gray-12)",
                                white_space="pre-wrap"  # Handle multi-line logs
                            ),
                            padding_y="1"
                        )
                    ),
                    align_items="start",
                    spacing="0",
                ),
                height="400px",  # Fixed height for scroll
                type="always",
                scrollbars="vertical",
                bg="var(--gray-2)",
                padding="4",
                border_radius="0 0 8px 8px"
            ),
            spacing="0",
            width="100%"
        ),
        padding="0",
        variant="surface",
        width="100%"
    )
|
||||
216
obsidian_automator/couch_manager.py
Normal file
216
obsidian_automator/couch_manager.py
Normal file
@@ -0,0 +1,216 @@
|
||||
import couchdb
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
from urllib.parse import quote
|
||||
|
||||
class CouchDBManager:
    """Low-level access to an Obsidian Self-hosted LiveSync CouchDB database.

    LiveSync stores each note as a metadata document carrying a ``children``
    list of chunk-document ids; chunk documents hold the actual text in their
    ``data`` (or legacy ``content``) field. This class lists, reads, moves
    and tags those documents directly.
    """

    def __init__(self, url, username, password, db_name):
        self.url = url.rstrip("/")
        self.username = username.strip()
        self.password = password.strip()
        self.db_name = db_name

    def connect(self):
        """Open and return the database handle, embedding credentials in the URL."""
        try:
            if self.username and self.password:
                if "://" in self.url:
                    protocol, rest = self.url.split("://", 1)
                else:
                    protocol, rest = "http", self.url
                # URL-encode credentials so special characters survive the URL.
                safe_user = quote(self.username, safe="")
                safe_pass = quote(self.password, safe="")
                full_url = f"{protocol}://{safe_user}:{safe_pass}@{rest}"
            else:
                full_url = self.url

            server = couchdb.Server(full_url)
            return server[self.db_name]
        except Exception as e:
            raise Exception(f"CouchDB Connection Error: {e}")

    def _calculate_size(self, db, children):
        """Calculate actual content size from chunks (LiveSync compatible)"""
        size = 0
        for chunk_id in children:
            if chunk_id in db:
                chunk = db[chunk_id]
                data = chunk.get("data", chunk.get("content", ""))
                size += len(str(data))
        return size

    def list_files(self, prefix_filter=""):
        """Returns a dict: {id: path} for all 'file' documents."""
        db = self.connect()
        files = {}
        # Use _all_docs to scan quickly
        for row in db.view('_all_docs', include_docs=True):
            doc = row.doc
            # Identify file metadata docs (has children array)
            if "children" in doc and isinstance(doc["children"], list):
                path = doc.get("path", doc["_id"])
                # Filter by prefix (folder) if requested
                # Normalizing path separators to /
                norm_path = path.replace("\\", "/")
                norm_prefix = prefix_filter.replace("\\", "/")

                # Debug print for first few items to diagnose filtering mismatch
                if len(files) < 3:
                    print(f"DEBUG: Found path '{norm_path}'. Filter: '{norm_prefix}'")

                if not prefix_filter or norm_path.startswith(norm_prefix):
                    files[doc["_id"]] = path

        print(f"DEBUG: list_files returning {len(files)} files after filtering.")
        return files

    def read_file_content(self, doc_id):
        """Reconstructs file content from chunks."""
        db = self.connect()
        doc = db[doc_id]
        content = []

        for chunk_id in doc.get("children", []):
            if chunk_id in db:
                chunk = db[chunk_id]
                data = chunk.get("data") or chunk.get("content") or ""
                # Check encryption: a leading '%' or an 'e_' flag marks
                # encrypted chunks we cannot decode here.
                if str(data).startswith("%") or chunk.get("e_"):
                    return "[ENCRYPTED_CONTENT]"
                content.append(str(data))

        return "".join(content)

    def move_file(self, doc_id, target_folder):
        """
        LiveSync-compatible file move:
        Updates the path and _id in place without breaking chunk references.
        NOTE: This approach updates the document but LiveSync may still need
        to reconcile the change. For best results, use Obsidian's built-in move.
        """
        db = self.connect()
        print(f"DEBUG: Attempting to move doc_id: {repr(doc_id)} to '{target_folder}'")

        doc = None
        actual_doc_id = None
        try:
            # Try direct lookup first (faster)
            if doc_id in db:
                doc = db[doc_id]
                actual_doc_id = doc_id
            else:
                # Fallback: case-insensitive scan
                doc_id_lower = doc_id.lower()
                for row in db.view('_all_docs', include_docs=True):
                    if row.id.lower() == doc_id_lower:
                        doc = row.doc
                        actual_doc_id = row.id
                        break

            if not doc:
                return f"Error: Document {doc_id} not found in DB (Scan failed)."

        except Exception as e:
            return f"Error fetching document {doc_id}: {e}"

        old_path = doc.get("path", actual_doc_id)
        filename = os.path.basename(old_path)

        # Construct new path.
        # BUGFIX: keep the original filename in the target path (it was
        # computed above but previously dropped from the constructed path).
        target_folder = target_folder.strip("/")
        new_path = f"{target_folder}/{filename}"
        new_doc_id = new_path.lower()

        # Check if already at target
        if actual_doc_id == new_doc_id:
            return f"Already at {new_path}"

        # IMPORTANT: For LiveSync compatibility, we need to:
        # 1. Update path field (tells LiveSync where file should be)
        # 2. Update mtime (triggers sync)
        # 3. Keep all other metadata intact

        # Update in place first
        doc["path"] = new_path
        doc["mtime"] = int(time.time() * 1000)
        db.save(doc)

        # If the doc_id needs to change, create new doc and delete old
        if actual_doc_id != new_doc_id:
            # Check if target exists
            if new_doc_id in db:
                # Target exists - merge/update it instead
                target_doc = db[new_doc_id]
                target_doc["path"] = new_path
                target_doc["mtime"] = int(time.time() * 1000)
                target_doc["children"] = doc["children"]  # Use latest chunks
                target_doc["size"] = self._calculate_size(db, doc["children"])  # Recalculate size
                target_doc["ctime"] = doc.get("ctime", target_doc.get("ctime"))
                target_doc["type"] = doc.get("type", "plain")
                target_doc["eden"] = doc.get("eden", {})
                db.save(target_doc)
                # Delete old
                db.delete(doc)
                return f"Merged and moved to {new_path}"
            else:
                # Create new doc with new ID
                new_doc = doc.copy()
                new_doc["_id"] = new_doc_id
                new_doc["path"] = new_path
                new_doc["size"] = self._calculate_size(db, new_doc["children"])  # Recalculate size
                del new_doc["_rev"]
                db.save(new_doc)
                # Delete old
                old_doc = db[actual_doc_id]  # Refresh to get latest _rev
                db.delete(old_doc)

        return f"Moved to {new_path}"

    def flag_rewrite(self, doc_id, reason, tag):
        """
        Appends a rewrite tag by adding a new chunk and linking it to the note.
        """
        db = self.connect()
        print(f"DEBUG: Flagging rewrite for doc_id: {repr(doc_id)}")

        doc = None
        try:
            # Try direct lookup first (faster)
            if doc_id in db:
                doc = db[doc_id]
            else:
                # Fallback: case-insensitive scan
                doc_id_lower = doc_id.lower()
                for row in db.view('_all_docs', include_docs=True):
                    if row.id.lower() == doc_id_lower:
                        doc = row.doc
                        break

            if not doc:
                return f"Error: Document {doc_id} not found (Scan failed)."

        except Exception as e:
            return f"Error fetching doc for rewrite {doc_id}: {e}"

        # Create new chunk
        # NOTE(review): second-resolution ids can collide if two notes are
        # flagged within the same second — confirm and uniquify if needed.
        chunk_id = f"h:{int(time.time())}"  # Simple ID generation
        chunk_content = f"\n\n{tag} {reason}\n"

        chunk_doc = {
            "_id": chunk_id,
            "data": chunk_content,
            "type": "chunk"  # LiveSync convention
        }

        db.save(chunk_doc)

        # Update metadata to point to new chunk
        doc["children"].append(chunk_id)
        doc["mtime"] = int(time.time() * 1000)

        # CRITICAL: Update size to match actual content size
        doc["size"] = self._calculate_size(db, doc["children"])

        db.save(doc)

        return f"Flagged {doc.get('path')} for rewrite."
|
||||
108
obsidian_automator/couch_sync.py
Normal file
108
obsidian_automator/couch_sync.py
Normal file
@@ -0,0 +1,108 @@
|
||||
import couchdb
|
||||
import os
|
||||
import base64
|
||||
import json
|
||||
from urllib.parse import quote
|
||||
|
||||
class CouchDBSync:
    """One-shot downloader: mirrors a LiveSync CouchDB database to local disk.

    Reconstructs each note from its chunk documents and writes it under a
    target folder; encrypted notes are skipped.
    """

    def __init__(self, url, username, password, db_name, passphrase=""):
        self.url = url.rstrip("/")
        self.username = username.strip()
        self.password = password.strip()
        self.db_name = db_name
        # Stored for future decryption support; not used by fetch_notes yet.
        self.passphrase = passphrase

    def connect(self):
        """Establishes connection to CouchDB."""
        try:
            if self.username and self.password:
                if "://" in self.url:
                    protocol, rest = self.url.split("://", 1)
                else:
                    protocol, rest = "http", self.url
                # URL-encode credentials so special characters survive the URL.
                safe_user = quote(self.username, safe="")
                safe_pass = quote(self.password, safe="")
                full_url = f"{protocol}://{safe_user}:{safe_pass}@{rest}"
            else:
                full_url = self.url

            server = couchdb.Server(full_url)
            # Force a round-trip so bad credentials/URLs fail here, not later.
            _ = server.version()
            return server
        except Exception as e:
            raise Exception(f"Connection Failed: {str(e)}")

    def fetch_notes(self, target_folder: str) -> list[str]:
        """Download and reconstruct all notes into *target_folder*.

        Returns a list of human-readable log lines; errors are appended to
        the log rather than raised.
        """
        logs = []
        try:
            server = self.connect()
            if self.db_name not in server:
                return [f"Error: Database '{self.db_name}' not found."]

            db = server[self.db_name]
            logs.append(f"Connected to {self.db_name}.")

            if not os.path.exists(target_folder):
                os.makedirs(target_folder, exist_ok=True)

            # 1. Fetch all docs with content in ONE request
            logs.append("Fetching all documents...")
            all_docs = {}
            # include_docs=True is much faster than 278 individual requests
            for row in db.view('_all_docs', include_docs=True):
                doc = row.doc
                if not doc['_id'].startswith("_design"):
                    all_docs[doc['_id']] = doc

            logs.append(f"Retrieved {len(all_docs)} documents. Reconstructing notes...")

            note_count = 0

            # 2. Iterate to find "Metadata" documents
            for doc_id, doc in all_docs.items():
                # Self-hosted LiveSync uses 'children' for files
                if "children" in doc and isinstance(doc["children"], list):

                    relative_path = doc.get("path", doc_id)

                    # Construct full local path
                    # Replace characters that are illegal in local filenames.
                    safe_path = relative_path.replace(":", "-").replace("|", "-")
                    full_local_path = os.path.join(target_folder, safe_path)

                    # 3. Reconstruct Content
                    full_content = []
                    is_encrypted = False

                    for chunk_id in doc["children"]:
                        chunk = all_docs.get(chunk_id)
                        if not chunk:
                            continue

                        chunk_data = chunk.get("data") or chunk.get("content") or ""

                        # A leading '%' or an 'e_' flag presumably marks an
                        # encrypted chunk — skip the whole note in that case.
                        if str(chunk_data).startswith("%") or chunk.get("e_"):
                            is_encrypted = True
                            break

                        full_content.append(str(chunk_data))

                    if is_encrypted:
                        continue

                    note_text = "".join(full_content)

                    # Only save if there is content or it's a known empty file
                    if note_text.strip() or len(doc["children"]) > 0:
                        os.makedirs(os.path.dirname(full_local_path), exist_ok=True)
                        with open(full_local_path, "w", encoding="utf-8") as f:
                            f.write(note_text)
                        note_count += 1
                        # Periodic progress line so the UI log stays alive.
                        if note_count % 10 == 0:
                            logs.append(f"Reconstructed {note_count} notes...")

            logs.append(f"Sync Complete. Reconstructed {note_count} total notes.")

        except Exception as e:
            logs.append(f"Sync Error: {str(e)}")

        return logs
|
||||
25
obsidian_automator/models.py
Normal file
25
obsidian_automator/models.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import reflex as rx
|
||||
from sqlmodel import Field
|
||||
from typing import Optional
|
||||
|
||||
class User(rx.Model, table=True):
    """Admin user credentials."""
    # Login name; indexed for lookup at sign-in.
    username: str = Field(index=True)
    # Hashed password only — plaintext is never stored.
    password_hash: str
|
||||
|
||||
class Configuration(rx.Model, table=True):
    """Persistent application settings (single-row app config)."""
    # Gemini
    gemini_api_key: Optional[str] = Field(default="")
    # Folder scanned for notes to process.
    input_folder: Optional[str] = Field(default="")
    # Tag appended when a note is flagged for rewrite.
    rewrite_tag: Optional[str] = Field(default="#rewrite")
    # Free-text vault philosophy injected into the agent prompt.
    philosophy: Optional[str] = Field(default="Keep notes atomic. Use Zettelkasten principles.")

    # CouchDB
    couchdb_url: Optional[str] = Field(default="")
    couchdb_user: Optional[str] = Field(default="")
    couchdb_password: Optional[str] = Field(default="")
    couchdb_passphrase: Optional[str] = Field(default="")
    couchdb_db_name: Optional[str] = Field(default="obsidian_livesync")

    # True once initial setup has completed.
    is_configured: bool = Field(default=False)
|
||||
136
obsidian_automator/note_server.py
Normal file
136
obsidian_automator/note_server.py
Normal file
@@ -0,0 +1,136 @@
|
||||
import os
|
||||
import shutil
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List
|
||||
|
||||
class NoteServer(ABC):
    """Abstract interface for a note storage backend.

    Implementations (local filesystem, CouchDB/LiveSync) expose listing,
    reading, moving and rewrite-flagging of markdown notes. Mutating methods
    return human-readable status strings rather than raising.
    """

    @abstractmethod
    def list_notes(self, directory: str) -> List[str]:
        """List all markdown files in the directory."""
        pass

    @abstractmethod
    def read_note(self, file_path: str) -> str:
        """Read content of a note."""
        pass

    @abstractmethod
    def move_note(self, file_path: str, target_folder: str) -> str:
        """Move a note to a target folder."""
        pass

    @abstractmethod
    def flag_rewrite(self, file_path: str, reason: str, rewrite_tag: str) -> str:
        """Append a rewrite tag and reason to the note."""
        pass

    @abstractmethod
    def list_subfolders(self, directory: str) -> List[str]:
        """List immediate subdirectories."""
        pass
|
||||
|
||||
class FileSystemServer(NoteServer):
    """NoteServer backed by the local filesystem (markdown files on disk)."""

    def list_subfolders(self, directory: str) -> List[str]:
        """Return immediate, non-hidden subdirectory names of *directory*."""
        if not os.path.exists(directory):
            return []
        try:
            return [d for d in os.listdir(directory) if os.path.isdir(os.path.join(directory, d)) and not d.startswith(".")]
        except Exception:
            # Best-effort: unreadable directories yield an empty list.
            return []

    def list_notes(self, directory: str) -> List[str]:
        """Recursively collect absolute paths of all .md files under *directory*."""
        notes = []
        if not os.path.exists(directory):
            return []
        for root, _, files in os.walk(directory):
            for file in files:
                if file.endswith(".md"):
                    # Only return absolute paths to keep things simple
                    notes.append(os.path.abspath(os.path.join(root, file)))
        return notes

    def read_note(self, file_path: str) -> str:
        """Return the note's UTF-8 text, or an error string on failure."""
        try:
            with open(file_path, "r", encoding="utf-8") as f:
                return f.read()
        except Exception as e:
            return f"Error reading file: {str(e)}"

    def move_note(self, file_path: str, target_folder: str) -> str:
        """Move the note into *target_folder*, refusing to overwrite.

        Creates the target folder if needed; returns a status string either way.
        """
        try:
            filename = os.path.basename(file_path)

            # Ensure target folder exists
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            new_path = os.path.join(target_folder, filename)

            # Fail safely rather than overwrite an existing file.
            # BUGFIX: status messages now interpolate the actual filename
            # (it was computed above but left out of the strings).
            if os.path.exists(new_path):
                return f"Error: File {filename} already exists in {target_folder}"

            shutil.move(file_path, new_path)
            return f"Moved {filename} to {target_folder}"
        except Exception as e:
            return f"Error moving file: {str(e)}"

    def flag_rewrite(self, file_path: str, reason: str, rewrite_tag: str) -> str:
        """Append '<rewrite_tag> <reason>' on its own line at the end of the note."""
        try:
            # Check if file ends with newline to avoid appending on same line
            with open(file_path, "r+", encoding="utf-8") as f:
                content = f.read()
                prefix = "\n" if content and not content.endswith("\n") else ""
                f.write(f"{prefix}\n{rewrite_tag} {reason}\n")
            return f"Flagged {os.path.basename(file_path)} for rewrite: {reason}"
        except Exception as e:
            return f"Error flagging file: {str(e)}"
|
||||
|
||||
from .couch_manager import CouchDBManager
|
||||
|
||||
class CouchDBNoteServer(NoteServer):
    """NoteServer implementation backed by a LiveSync CouchDB database.

    The "paths" returned by list_notes are CouchDB document ids, not
    filesystem paths; read/move/flag operate on those ids via CouchDBManager.
    """

    def __init__(self, url, user, password, db_name):
        self.manager = CouchDBManager(url, user, password, db_name)

    def list_notes(self, directory: str) -> List[str]:
        # Directory here acts as a prefix filter
        files_dict = self.manager.list_files(prefix_filter=directory)
        return list(files_dict.keys())

    def list_subfolders(self, directory: str) -> List[str]:
        """Derive immediate subfolder names from the stored document paths."""
        # CouchDB is flat. We simulate folders by looking at paths.
        # This is expensive (scan all), but accurate.
        all_files_dict = self.manager.list_files()
        subfolders = set()

        # If directory is "(All Notes)", we look at root folders
        # If directory is "Inbox", we look at "Inbox/Subfolder"

        prefix = directory if directory != "(All Notes)" else ""
        prefix = prefix.strip("/")

        for doc_id, path in all_files_dict.items():

            # Normalize path
            path = path.replace("\\", "/")

            if prefix and not path.startswith(prefix + "/"):
                continue

            # Strip prefix
            relative_path = path[len(prefix)+1:] if prefix else path

            # Only paths that still contain a "/" have a subfolder component.
            if "/" in relative_path:
                top_level = relative_path.split("/")[0]
                subfolders.add(top_level)

        return sorted(list(subfolders))

    def read_note(self, file_path: str) -> str:
        return self.manager.read_file_content(file_path)

    def move_note(self, file_path: str, target_folder: str) -> str:
        return self.manager.move_file(file_path, target_folder)

    def flag_rewrite(self, file_path: str, reason: str, rewrite_tag: str) -> str:
        return self.manager.flag_rewrite(file_path, reason, rewrite_tag)
|
||||
21
obsidian_automator/obsidian_automator.py
Normal file
21
obsidian_automator/obsidian_automator.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import reflex as rx
|
||||
from .pages.dashboard import dashboard_page, settings_page
|
||||
from .pages.login import login_page
|
||||
from .pages.setup import setup_page
|
||||
from .state.auth import AuthState
|
||||
from .state.app_state import AppState
|
||||
|
||||
# Reflex application instance with a dark, iris-accented theme.
app = rx.App(
    theme=rx.theme(
        appearance="dark",
        accent_color="iris",
        gray_color="slate",
        radius="medium",
    )
)

# Routes
# The dashboard's on_load chain checks auth, starts the background scheduler,
# loads persisted config and resets transient UI state, in that order.
app.add_page(dashboard_page, route="/", on_load=[AuthState.on_load_check, AppState.start_scheduler, AppState.load_config, AppState.reset_state])
app.add_page(settings_page, route="/settings", on_load=[AuthState.on_load_check, AppState.load_config])
app.add_page(login_page, route="/login")
app.add_page(setup_page, route="/setup")
|
||||
165
obsidian_automator/pages/dashboard.py
Normal file
165
obsidian_automator/pages/dashboard.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import reflex as rx
|
||||
from ..state.app_state import AppState
|
||||
from ..components.layout import dashboard_layout
|
||||
from ..components.terminal import terminal_window
|
||||
|
||||
def dashboard_page():
    """Main dashboard: action toolbar, status cards and the live agent log."""
    return dashboard_layout(
        rx.vstack(
            rx.hstack(
                rx.heading("Dashboard", size="8"),
                rx.spacer(),

                # Pull notes down from the CouchDB (LiveSync) database.
                rx.button(
                    rx.hstack(rx.icon("download"), rx.text("Sync from CouchDB")),
                    on_click=AppState.run_couch_sync,
                    loading=AppState.is_running,
                    variant="soft",
                    color_scheme="orange",
                    size="3",
                    margin_right="2",
                    disabled=AppState.is_running
                ),

                # Target-folder selector for the agent run.
                rx.select(
                    AppState.available_subfolders,
                    value=AppState.selected_subfolder,
                    on_change=AppState.set_selected_subfolder,
                    placeholder="Select Target Folder",
                    size="3",
                    margin_right="2",
                    width="200px"
                ),

                # Re-scan the vault for subfolders.
                rx.icon_button(
                    rx.icon("refresh-cw"),
                    on_click=AppState.refresh_subfolders,
                    variant="soft",
                    size="3",
                    margin_right="2"
                ),

                rx.button(
                    rx.hstack(rx.icon("play"), rx.text("Run Agent Now")),
                    on_click=AppState.run_agent_process,
                    loading=AppState.is_running,
                    size="3",
                    disabled=AppState.is_running
                ),
                width="100%",
                align_items="center",
                margin_bottom="6"
            ),

            # Status summary cards (labels are static text, not state-bound).
            rx.grid(
                rx.card(
                    rx.vstack(
                        rx.text("Scheduler", size="2", color="gray.11", weight="bold"),
                        rx.text("Active", size="6", weight="bold"),
                        rx.text("Daily @ 02:00", size="1", color="gray.10"),
                        spacing="1"
                    )
                ),
                rx.card(
                    rx.vstack(
                        rx.text("Model", size="2", color="gray.11", weight="bold"),
                        rx.text("Gemini Flash", size="6", weight="bold"),
                        rx.text("v3 Preview", size="1", color="gray.10"),
                        spacing="1"
                    )
                ),
                columns="2",
                spacing="4",
                width="100%"
            ),

            # Live log stream from AppState.logs.
            rx.box(
                terminal_window(AppState.logs, title="Agent Activity Log"),
                width="100%",
                box_shadow="0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06)"
            ),

            spacing="6",
            width="100%"
        )
    )
|
||||
|
||||
def settings_page():
    """Settings screen: tabbed forms for Gemini/agent config and CouchDB sync.

    Both tabs persist through the same AppState.save_config handler.
    """
    return dashboard_layout(
        rx.vstack(
            rx.heading("Settings", size="8", margin_bottom="6"),

            rx.tabs.root(
                rx.tabs.list(
                    rx.tabs.trigger("General", value="general"),
                    rx.tabs.trigger("CouchDB Sync", value="couchdb"),
                ),

                rx.tabs.content(
                    rx.card(
                        rx.vstack(
                            rx.text("Gemini API Key", weight="bold"),
                            rx.input(value=AppState.api_key, on_change=AppState.set_api_key, type="password", width="100%"),

                            rx.text("Input Folder Path", weight="bold"),
                            rx.input(value=AppState.input_folder, on_change=AppState.set_input_folder, width="100%"),

                            rx.text("Rewrite Tag", weight="bold"),
                            rx.input(value=AppState.rewrite_tag, on_change=AppState.set_rewrite_tag, width="100%"),

                            rx.text("Vault Philosophy", weight="bold"),
                            rx.text_area(value=AppState.philosophy, on_change=AppState.set_philosophy, height="200px", width="100%"),

                            rx.button("Save Changes", on_click=AppState.save_config, size="3", margin_top="4"),

                            spacing="4",
                            align_items="start",
                            padding="6"
                        ),
                        width="100%",
                        max_width="800px",
                        margin_top="4"
                    ),
                    value="general"
                ),

                rx.tabs.content(
                    rx.card(
                        rx.vstack(
                            rx.heading("CouchDB Configuration", size="4"),
                            rx.text("Connect to your Obsidian LiveSync database to download notes.", color="gray.11", size="2"),

                            rx.text("Database URL", weight="bold", margin_top="2"),
                            rx.input(placeholder="http://localhost:5984", value=AppState.couch_url, on_change=AppState.set_couch_url, width="100%"),

                            rx.text("Database Name", weight="bold"),
                            rx.input(placeholder="obsidian_livesync", value=AppState.couch_db, on_change=AppState.set_couch_db, width="100%"),

                            rx.text("Username (Optional)", weight="bold"),
                            rx.input(value=AppState.couch_user, on_change=AppState.set_couch_user, width="100%"),

                            rx.text("Password (Optional)", weight="bold"),
                            rx.input(type="password", value=AppState.couch_pass, on_change=AppState.set_couch_pass, width="100%"),

                            rx.text("Decryption Passphrase (Experimental)", weight="bold"),
                            rx.input(type="password", value=AppState.couch_passphrase, on_change=AppState.set_couch_passphrase, width="100%"),

                            rx.button("Save Changes", on_click=AppState.save_config, size="3", margin_top="4"),

                            spacing="4",
                            align_items="start",
                            padding="6"
                        ),
                        width="100%",
                        max_width="800px",
                        margin_top="4"
                    ),
                    value="couchdb"
                ),

                # BUGFIX: Reflex's Python API uses snake_case `default_value`;
                # the camelCase `defaultValue` prop was not applied, so the
                # default tab never opened as intended.
                default_value="general",
                width="100%"
            ),
            width="100%"
        )
    )
|
||||
26
obsidian_automator/pages/login.py
Normal file
26
obsidian_automator/pages/login.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import reflex as rx
|
||||
from ..state.auth import AuthState
|
||||
|
||||
def login_page():
    """Render the login screen: a centered card with username/password fields.

    Bound to ``AuthState``; the Sign In button triggers ``AuthState.check_login``.
    """
    username_field = rx.input(
        value=AuthState.username,
        on_change=AuthState.set_username,
        width="100%",
    )
    password_field = rx.input(
        type="password",
        value=AuthState.password,
        on_change=AuthState.set_password,
        width="100%",
    )
    form = rx.vstack(
        rx.heading("Login", size="7", margin_bottom="4"),
        rx.text("Username", size="2", weight="bold"),
        username_field,
        rx.text("Password", size="2", weight="bold"),
        password_field,
        rx.button(
            "Sign In",
            size="3",
            width="100%",
            on_click=AuthState.check_login,
            margin_top="4",
        ),
        padding="6",
        spacing="3",
        align_items="start",
    )
    return rx.center(
        rx.card(form, width="400px"),
        height="100vh",
        bg="var(--gray-1)",
    )
|
||||
44
obsidian_automator/pages/setup.py
Normal file
44
obsidian_automator/pages/setup.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import reflex as rx
|
||||
from ..state.auth import AuthState
|
||||
|
||||
def setup_step(title: str, children: list):
    """Wrap a titled group of setup controls in a left-aligned column.

    ``children`` are splatted after the heading; layout props pin the column
    to 100% width with a 500px cap.
    """
    heading = rx.heading(title, size="6", margin_bottom="4")
    return rx.vstack(
        heading,
        *children,
        width="100%",
        max_width="500px",
        spacing="4",
        align_items="start",
    )
|
||||
|
||||
def setup_page():
    """Render the first-run wizard: admin credentials plus initial agent config.

    All inputs are bound to ``AuthState``; the Complete Setup button calls
    ``AuthState.create_admin_account``.
    """
    account_fields = [
        rx.text("Create Admin Account", weight="bold"),
        rx.input(placeholder="Username", value=AuthState.username,
                 on_change=AuthState.set_username, width="100%"),
        rx.input(placeholder="Password", type="password", value=AuthState.password,
                 on_change=AuthState.set_password, width="100%"),
        rx.input(placeholder="Confirm Password", type="password",
                 value=AuthState.confirm_password,
                 on_change=AuthState.set_confirm_password, width="100%"),
    ]
    agent_fields = [
        rx.text("Agent Configuration", weight="bold"),
        rx.input(placeholder="Gemini API Key", value=AuthState.setup_api_key,
                 on_change=AuthState.set_setup_api_key, width="100%", type="password"),
        rx.input(placeholder="Vault Input Path", value=AuthState.setup_input_folder,
                 on_change=AuthState.set_setup_input_folder, width="100%"),
        rx.text_area(placeholder="Vault Philosophy (e.g. Zettelkasten rules)",
                     value=AuthState.setup_philosophy,
                     on_change=AuthState.set_setup_philosophy,
                     width="100%", height="100px"),
    ]
    body = rx.vstack(
        rx.heading("Welcome to Obsidian Automator", size="8", color="var(--accent-9)"),
        rx.text("Let's secure your agent and configure your vault.",
                color="gray.11", margin_bottom="6"),
        rx.separator(margin_bottom="6"),
        *account_fields,
        rx.separator(margin_y="4"),
        *agent_fields,
        rx.button("Complete Setup", size="4", width="100%",
                  on_click=AuthState.create_admin_account, margin_top="4"),
        padding="8",
    )
    return rx.center(
        rx.card(body, size="4", width="600px"),
        height="100vh",
        bg="var(--gray-2)",
    )
|
||||
270
obsidian_automator/state/app_state.py
Normal file
270
obsidian_automator/state/app_state.py
Normal file
@@ -0,0 +1,270 @@
|
||||
import reflex as rx
|
||||
import asyncio
|
||||
import schedule
|
||||
import os
|
||||
from sqlmodel import select
|
||||
from ..models import Configuration
|
||||
from ..agent import ObsidianAgent
|
||||
from ..note_server import FileSystemServer, CouchDBNoteServer
|
||||
from ..couch_sync import CouchDBSync
|
||||
|
||||
class AppState(rx.State):
    """Main application state: configuration, live agent logs, and sync/agent runs.

    Configuration is persisted in the ``Configuration`` table and mirrored
    into this state via :meth:`load_config` / :meth:`save_config`.
    """

    # Live Status
    logs: list[str] = ["Waiting for agent..."]
    is_running: bool = False
    _scheduler_task: bool = False  # backend-only flag: scheduler loop already started

    # Configuration (Loaded from DB)
    api_key: str = ""
    input_folder: str = ""  # Used as Prefix for CouchDB mode
    philosophy: str = ""
    rewrite_tag: str = ""

    # CouchDB Config
    couch_url: str = ""
    couch_user: str = ""
    couch_pass: str = ""
    couch_passphrase: str = ""
    couch_db: str = ""

    # Subfolder Selection
    available_subfolders: list[str] = []
    selected_subfolder: str = ""

    def _get_active_server(self):
        """Returns the appropriate NoteServer based on config.

        A non-empty ``couch_url`` selects CouchDB mode; otherwise notes are
        read from the local file system.
        """
        if self.couch_url:
            return CouchDBNoteServer(self.couch_url, self.couch_user, self.couch_pass, self.couch_db)
        else:
            return FileSystemServer()

    def load_config(self):
        """Loads configuration from the database into State."""
        with rx.session() as session:
            config = session.exec(select(Configuration)).first()
            if config:
                self.api_key = config.gemini_api_key
                self.input_folder = config.input_folder
                self.philosophy = config.philosophy
                self.rewrite_tag = config.rewrite_tag

                # CouchDB
                self.couch_url = config.couchdb_url or ""
                self.couch_user = config.couchdb_user or ""
                self.couch_pass = config.couchdb_password or ""
                self.couch_passphrase = config.couchdb_passphrase or ""
                self.couch_db = config.couchdb_db_name or "obsidian_livesync"

        # Auto-refresh folders on load. refresh_subfolders is sync (server
        # calls may block briefly); failures here must never break page load,
        # so errors are deliberately ignored (best-effort).
        try:
            self.refresh_subfolders()
        except Exception:
            pass

    def refresh_subfolders(self):
        """Scans the input folder for subdirectories.

        Populates ``available_subfolders`` with a ``(All Notes)`` sentinel
        first; on failure the list is replaced with a single ``(Error)`` entry.
        """
        try:
            server = self._get_active_server()
            # If using CouchDB, input_folder acts as a root prefix (e.g. "Inbox").
            # If using FS, it's a path.
            root = self.input_folder or ""

            folders = server.list_subfolders(root)
            self.available_subfolders = ["(All Notes)"] + sorted(folders)
            if not self.selected_subfolder:
                self.selected_subfolder = "(All Notes)"
        except Exception as e:
            print(f"Error refreshing folders: {e}")
            self.available_subfolders = ["(Error)"]

    def save_config(self):
        """Persists current state config back to DB.

        Creates the singleton ``Configuration`` row on first save, then
        refreshes the subfolder list (best-effort) and alerts the user.
        """
        with rx.session() as session:
            config = session.exec(select(Configuration)).first()
            if not config:
                config = Configuration()
                session.add(config)

            config.gemini_api_key = self.api_key
            config.input_folder = self.input_folder
            config.philosophy = self.philosophy
            config.rewrite_tag = self.rewrite_tag

            # CouchDB
            config.couchdb_url = self.couch_url
            config.couchdb_user = self.couch_user
            config.couchdb_password = self.couch_pass
            config.couchdb_passphrase = self.couch_passphrase
            config.couchdb_db_name = self.couch_db

            session.add(config)
            session.commit()

        # Best-effort refresh; saving must succeed even if the scan fails.
        try:
            self.refresh_subfolders()
        except Exception:
            pass

        return rx.window_alert("Configuration saved.")

    async def run_couch_sync(self):
        """Triggers the CouchDB download.

        Runs ``CouchDBSync.fetch_notes`` in a worker thread and streams its
        logs into ``self.logs``. State mutations happen under ``async with
        self`` so concurrent updates stay consistent.
        """
        async with self:
            if self.is_running: return
            self.is_running = True
            self.logs.append("--- Starting CouchDB Sync ---")
            self.load_config()

            if not self.couch_url:
                self.logs.append("Error: CouchDB URL not configured.")
                self.is_running = False
                return

        try:
            syncer = CouchDBSync(self.couch_url, self.couch_user, self.couch_pass, self.couch_db, self.couch_passphrase)

            sync_logs = await asyncio.to_thread(
                syncer.fetch_notes,
                self.input_folder
            )

            async with self:
                self.logs.extend(sync_logs)
                self.logs.append("--- CouchDB Sync Complete ---")
                self.refresh_subfolders()  # Auto-refresh after sync
        except Exception as e:
            async with self:
                self.logs.append(f"Sync Error: {str(e)}")
        finally:
            async with self:
                self.is_running = False

    def reset_state(self):
        """Force reset running state on load."""
        self.is_running = False

    async def run_agent_process(self):
        """Runs the agent and provides real-time feedback.

        Async generator event handler: each ``yield`` pushes the accumulated
        logs to the UI. Notes are processed sequentially with a 15s pause
        between LLM calls to respect free-tier rate limits.
        """
        if self.is_running:
            yield rx.window_alert("Agent is already running!")
            return

        self.is_running = True
        self.logs.append("--- Starting AI Agent ---")
        yield  # Force UI update immediately

        async with self:
            self.load_config()
            # Strict check only for API Key. Input folder is optional for CouchDB (root scan)
            if not self.api_key:
                self.logs.append("Error: Missing API Key.")
                self.is_running = False
                return

            # If FS mode, input_folder is required.
            if not self.couch_url and not self.input_folder:
                self.logs.append("Error: Input Folder required for File System mode.")
                self.is_running = False
                return

            # Determine target path
            target_path = self.input_folder
            if self.selected_subfolder and self.selected_subfolder != "(All Notes)":
                target_path = os.path.join(self.input_folder, self.selected_subfolder)
                self.logs.append(f"Targeting specific folder: {self.selected_subfolder}")

        try:
            # yield again to ensure user sees "Starting"
            yield

            # Use active server (CouchDB or FS)
            server = self._get_active_server()

            # For CouchDB, we don't need a vault_root calculator like FS,
            # because paths are relative to the DB root already.
            # But agent expects one. We can pass "" for CouchDB.
            vault_root = ""
            if isinstance(server, FileSystemServer):
                # Derive Vault Root (Parent of the configured Inbox)
                vault_root = os.path.dirname(self.input_folder.rstrip(os.sep))

            agent = ObsidianAgent(api_key=self.api_key, server=server)

            # 1. Get the list of notes first
            notes = await asyncio.to_thread(server.list_notes, target_path)

            async with self:
                self.logs.append(f"Found {len(notes)} notes to process.")
            yield

            if not notes:
                async with self:
                    self.logs.append("No notes found in that scope.")
                    self.is_running = False
                return

            # 2. Process them one by one in the thread, but updating state in between
            for i, note_path in enumerate(notes):
                # Rate Limit Sleep (except for the first one)
                if i > 0:
                    async with self:
                        self.logs.append(" ...Waiting 15s (Free Tier Rate Limit)...")
                    yield
                    await asyncio.sleep(15)

                filename = os.path.basename(note_path)
                content = server.read_note(note_path)
                preview = (content[:50].replace("\n", " ") + "...") if content else "[EMPTY NOTE]"

                async with self:
                    # BUGFIX: log the actual note name (``filename`` was
                    # computed but a literal placeholder was printed before).
                    self.logs.append(f"[{i+1}/{len(notes)}] Analyzing: {filename}")
                    self.logs.append(f" Context: \"{preview}\"")
                yield  # Push update to UI

                # Process single note in thread
                result = await asyncio.to_thread(
                    agent._process_single_note_with_retry,
                    note_path,
                    content,
                    self.philosophy,
                    self.rewrite_tag,
                    vault_root
                )

                async with self:
                    self.logs.append(f" -> {result}")
                yield  # Push result

            async with self:
                self.logs.append("--- All Notes Processed ---")

        except Exception as e:
            async with self:
                self.logs.append(f"Critical Error: {str(e)}")
        finally:
            async with self:
                self.is_running = False
            yield

    async def start_scheduler(self):
        """Starts the background scheduler loop exactly once per process."""
        async with self:
            if self._scheduler_task: return
            self._scheduler_task = True
            self.logs.append("Scheduler active (Daily @ 02:00).")
        asyncio.create_task(self._run_scheduler_loop())

    async def _run_scheduler_loop(self):
        """Polls the ``schedule`` registry once a minute, forever."""
        def job():
            # In a real deployment, we'd need a way to trigger the async task from here
            # For now, we print to stdout which will show in the server logs
            print("Scheduler Triggered")

        schedule.every().day.at("02:00").do(job)
        while True:
            schedule.run_pending()
            await asyncio.sleep(60)
|
||||
69
obsidian_automator/state/auth.py
Normal file
69
obsidian_automator/state/auth.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import reflex as rx
|
||||
from passlib.context import CryptContext
|
||||
from ..models import User, Configuration
|
||||
from sqlmodel import select
|
||||
|
||||
# Shared password hasher: argon2 is the only accepted scheme; any other
# stored hash format is treated as deprecated by passlib.
pwd_context = CryptContext(schemes=["argon2"], deprecated="auto")
|
||||
|
||||
class AuthState(rx.State):
    """Authentication state: login/logout plus the first-run setup wizard."""

    username: str = ""
    password: str = ""
    confirm_password: str = ""
    is_logged_in: bool = False

    # Wizard Config Fields
    setup_api_key: str = ""
    setup_input_folder: str = ""
    setup_philosophy: str = ""

    def check_login(self):
        """Validates credentials.

        Returns a redirect to "/" on success, otherwise an alert. The same
        alert is shown for unknown users and wrong passwords so the response
        does not reveal which usernames exist.
        """
        with rx.session() as session:
            user = session.exec(select(User).where(User.username == self.username)).first()
            if user and pwd_context.verify(self.password, user.password_hash):
                self.is_logged_in = True
                return rx.redirect("/")

        return rx.window_alert("Invalid credentials.")

    def logout(self):
        """Clears the session fields and returns to the login screen."""
        self.is_logged_in = False
        self.username = ""
        self.password = ""
        return rx.redirect("/login")

    def create_admin_account(self):
        """Creates the initial admin account during wizard setup.

        Validates the form, persists the admin ``User`` together with the
        initial ``Configuration`` row, and logs the new admin in.
        """
        # Robustness: reject blank usernames, which would otherwise create an
        # account that is impossible to type at the login prompt.
        if not self.username.strip():
            return rx.window_alert("Username is required.")

        if self.password != self.confirm_password:
            return rx.window_alert("Passwords do not match.")

        if len(self.password) < 8:
            return rx.window_alert("Password must be at least 8 characters.")

        hashed = pwd_context.hash(self.password)
        user = User(username=self.username, password_hash=hashed)

        # Also save the initial config
        config = Configuration(
            gemini_api_key=self.setup_api_key,
            input_folder=self.setup_input_folder,
            philosophy=self.setup_philosophy,
            is_configured=True
        )

        with rx.session() as session:
            # Guard: setup runs once. Without this, re-submitting the wizard
            # form would silently insert a second admin and a duplicate
            # Configuration row.
            if session.exec(select(User)).first():
                return rx.window_alert("Setup has already been completed.")
            session.add(user)
            session.add(config)
            session.commit()

        self.is_logged_in = True
        return rx.redirect("/")

    def on_load_check(self):
        """Redirects to setup if no user exists, or login if not authenticated."""
        with rx.session() as session:
            user = session.exec(select(User)).first()
            if not user:
                return rx.redirect("/setup")
            elif not self.is_logged_in:
                return rx.redirect("/login")
|
||||
Reference in New Issue
Block a user