This commit is contained in:
svemagie
2026-03-31 14:20:47 +02:00
commit a5b1ef8158
22 changed files with 40044 additions and 0 deletions

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
node_modules/
*.js.map
.claude/
.DS_Store

142
CLAUDE.md Normal file
View File

@@ -0,0 +1,142 @@
# Memex Chat — CLAUDE.md
Obsidian plugin: Chat with your vault using Claude AI. Semantic TF-IDF + local embedding context retrieval, `@Notizname` mentions, thread history, prompt extension buttons, streaming responses, related notes sidebar.
## Build
```bash
npm install
npm run build # production build → main.js
npm run dev # watch mode with inline sourcemaps
```
Entry: `src/main.ts` → bundled to `main.js` via esbuild (CJS, ES2018 target).
`obsidian` and all `@codemirror/*` / `@lezer/*` packages are external (provided by Obsidian).
## Architecture
| File | Role |
|---|---|
| `src/main.ts` | Plugin entry — `MemexChatPlugin extends Plugin`. Registers views, commands, settings tab. Wires index rebuild, layout-ready hook, sync wait, and embedding progress notices. |
| `src/ChatView.ts` | Main UI — `ChatView extends ItemView`. Thread management, sidebar history, context preview, mode buttons, streaming render, Copy/Save actions. View type: `memex-chat-view`. |
| `src/VaultSearch.ts` | TF-IDF search engine. Builds in-memory index over all vault markdown files. Frontmatter property boost (5×). `findSimilarByName()` for unresolved link hints. Exports `SearchResult` interface (includes optional `linked` field). |
| `src/EmbedSearch.ts` | Local semantic search via `@xenova/transformers` (ONNX, WASM). Caches per-note `.ajson` vectors under `<vault>/.memex-chat/embeddings/`. `searchSimilarToFile()` boosts scores by frontmatter property links (+0.15) and shared tags (+0.05/tag). |
| `src/RelatedNotesView.ts` | Sidebar panel — `RelatedNotesView extends ItemView`. Shows semantically similar notes for the active file; refreshes on file-open. Displays similarity bar and "verknüpft" badge for property-linked notes. View type: `memex-related-notes`. |
| `src/ClaudeClient.ts` | Anthropic API client. `streamChat()` yields `ClaudeStreamChunk` via async generator using Node `https` + SSE (bypasses Electron CORS/CSP that blocks `fetch`). `chat()` and `fetchModels()` use Obsidian `requestUrl` (no SDK). |
| `src/SettingsTab.ts` | `MemexChatSettingsTab` + `MemexChatSettings` interface + `DEFAULT_SETTINGS`. Exports `PromptButton` interface. Folder autocomplete via `attachFolderDropdown()` helper. |
| `styles.css` | All plugin styles. CSS classes prefixed `vc-` (e.g. `vc-root`, `vc-msg--assistant`, `vc-related-*`, `vc-folder-*`). |
| `manifest.json` | Obsidian plugin manifest. ID: `memex-chat`. Version: `1.0.3`. |
| `main.js` | Compiled output — do not edit manually, always rebuild. |
| `esbuild.config.mjs` | Build config with three plugins: `stubNativeModules` (stubs onnxruntime-node/sharp/canvas), `forceOnnxWeb` (patches ONNX backend detection), `forceOrtWebBrowserMode` (patches ort-web for Electron). |
## Key Patterns
- **Data persistence**: `this.saveData(this.data)` / `this.loadData()` — single object `{ settings, threads }`. Settings merge on load preserves new fields via per-entry spread for `promptButtons`.
- **Streaming**: `ClaudeClient.streamChat()` is an async generator using Node `https` with `stream: true` and SSE parsing (`content_block_delta` events). `ChatView` iterates it and calls `updateLastMessage()` per chunk. `chat()` and `fetchModels()` use `requestUrl` (buffered, fine for non-streaming calls).
- **Context flow**: Query → `VaultSearch.search()` or `EmbedSearch.search()` → context preview → user confirms → `sendMessage()` injects note content into the Claude prompt. Auto-retrieve skipped when prompt extension buttons are active.
- **Active search engine**: `plugin.activeSearch` returns `EmbedSearch` when enabled, else `VaultSearch`.
- **System prompt layering**: base system prompt → optional `systemContextFile` → active `promptButtons` extension files (each appended with `\n\n---\n`).
- **@mention syntax**: `@Notizname` — autocomplete triggers after 2 chars, inserts full basename. Parsing in `handleSend` matches vault filenames directly (handles spaces & special chars).
- **Prompt buttons**: `activeExtensions: Set<string>` tracks active button file paths. Mode hint panel shows `helpText` above input; hidden after send. Date-search buttons parse month from query and filter files by `getFileDate()`.
- **Thread sidebar**: Inline rename (double-click title). Collapsible "Verlauf" section loads vault chat files not in active threads via `parseThreadFromVault()`.
- **Thread storage**: Optionally saved as Markdown to `threadsFolder` (default `Calendar/Chat/`). Filename: `YYYYMMDDHHmmss Title.md`. Frontmatter includes `id:` for dedup on re-import.
- **Message actions**: Copy (clipboard) and "Als Notiz" (save to Obsidian's default new-note folder) appear on hover for finished assistant messages.
- **Unresolved links**: `is-unresolved` class + inline "Ähnliche Notiz: X" hint via `findSimilarByName()`.
- **History cap**: Last 10 messages sent to API per request.
- **CSS prefix**: `vc-` for all plugin DOM classes. Do not use Obsidian internal class names.
- **Event listeners**: Use `this.registerDomEvent()` for permanent listeners (auto-cleanup on view close). Inline `onclick` / `addEventListener` acceptable for dynamic elements that are re-created.
- **TypeScript**: `strictNullChecks` on, `moduleResolution: bundler`. No tests currently.
## EmbedSearch
- Model: `TaylorAI/bge-micro-v2` (default) — 384-dim, quantized ONNX, WASM backend via CDN (`cdn.jsdelivr.net/npm/onnxruntime-web@1.14.0/dist/`)
- Cache: `<vault>/.memex-chat/embeddings/<note-path>.ajson` → `{ mtime, vec }`. Manifest at `.manifest.json`.
- Models stored in `<vault>/.memex-chat/models/` (env.cacheDir).
- Incremental flush every 100 embeds; final prune of stale files on completion.
- Per-embed timeout: 13 s (120 s for first call while WASM/model loads).
- `reembedFile(TFile)`: debounced 2 s re-embed on vault `modify` events.
- `searchSimilarToFile(file, topK=10)`: cosine similarity with property/tag boosting (see below).
- `excludeFolders: string[]` — vault folder prefixes skipped during indexing.
- `contextProperties: string[]` — frontmatter keys whose wikilink values get +0.15 score boost; shared tags get +0.05 each (max 3). Scores capped at 1.0.
- Obsidian Sync wait: `waitForSyncIdle()` monitors vault events (5 s probe, 15 s quiet) before starting `buildIndex`.
- esbuild patches required: `stubNativeModules`, `forceOnnxWeb`, `forceOrtWebBrowserMode`. `import.meta.url` defined as a constant string.
## RelatedNotesView
- Opens in right sidebar leaf via `plugin.activateRelatedView()` or sparkles ribbon icon.
- Refreshes on `active-leaf-change` and `file-open` (400 ms debounce).
- `onIndexReady()` called by plugin after `buildIndex` completes.
- Shows: note title, folder path (dimmed), similarity bar + percentage.
- "verknüpft" badge (accent colour) for notes boosted by a property link.
## Settings (MemexChatSettings)
| Field | Default | Description |
|---|---|---|
| `apiKey` | `""` | Anthropic API key |
| `model` | `claude-opus-4-6` | Claude model ID |
| `maxTokens` | `8192` | Max output tokens (1024–16000) |
| `maxContextNotes` | `6` | TF-IDF/embedding context notes per query |
| `maxCharsPerNote` | `2500` | Characters per context note |
| `systemPrompt` | (German default) | Base system instructions |
| `systemContextFile` | `""` | Optional vault note appended to system prompt |
| `autoRetrieveContext` | `true` | Auto-search on send |
| `showContextPreview` | `true` | Show context confirm step |
| `saveThreadsToVault` | `true` | Save chats as vault markdown files |
| `threadsFolder` | `Calendar/Chat` | Folder for saved threads |
| `sendOnEnter` | `false` | Enter sends (vs. Cmd+Enter) |
| `contextProperties` | `[collection, related, up, tags]` | Frontmatter props boosted 5× in TF-IDF; also used for +0.15 score boost in EmbedSearch |
| `useEmbeddings` | `false` | Enable local semantic embeddings |
| `embeddingModel` | `TaylorAI/bge-micro-v2` | ONNX embedding model ID |
| `embedExcludeFolders` | `[]` | Vault folders excluded from embedding |
| `promptButtons` | Draft Check, Monthly Check | Header mode buttons with system prompt extension |
## Prompt Buttons (PromptButton interface)
```typescript
interface PromptButton {
label: string;
filePath: string; // vault path to prompt note (without .md)
searchMode?: "date"; // enables date-based file search
searchFolders?: string[]; // restrict date search to these folders
helpText?: string; // shown above input when button is active
}
```
## Folder Autocomplete
`attachFolderDropdown(wrap, input, getExcluded, onPick)` helper in `SettingsTab.ts` applied to:
- `embedExcludeFolders` (chip-tag list)
- prompt button `searchFolders` (chip-tag list)
- `threadsFolder` (single value)
CSS classes: `vc-folder-search-wrap`, `vc-folder-dropdown`, `vc-folder-item`.
## Deployment (Manual)
Copy `main.js`, `manifest.json`, `styles.css` into `.obsidian/plugins/memex-chat/` in the target vault.
## Models (SettingsTab.ts)
Static `MODELS` array (fallback / initial dropdown population):
| ID | Label |
|---|---|
| `claude-opus-4-6` | Claude Opus 4.6 (Stärkste) |
| `claude-sonnet-4-6` | Claude Sonnet 4.6 (Empfohlen) |
| `claude-haiku-4-5-20251001` | Claude Haiku 4.5 (Schnell) |
Default: `claude-opus-4-6`.
**"Aktualisieren" button**: calls `ClaudeClient.fetchModels(apiKey)` to fetch the live model list from the Anthropic API and repopulate the dropdown dynamically. This supersedes the static array at runtime. Update `MODELS` and `DEFAULT_SETTINGS.model` only when changing the compile-time fallback.
## Embedding Models (EmbedSearch.ts)
`EMBEDDING_MODELS` array exported from `EmbedSearch.ts` and used to populate the embedding model dropdown in settings:
| ID | Description |
|---|---|
| `TaylorAI/bge-micro-v2` | BGE Micro v2 — default, 384-dim, fastest |
| `Xenova/all-MiniLM-L6-v2` | MiniLM L6 v2 — 384-dim |
| `Xenova/multilingual-e5-small` | Multilingual E5 Small — DE/EN |
| `Xenova/paraphrase-multilingual-MiniLM-L12-v2` | Multilingual MiniLM L12 |

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Sven
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

138
README.md Normal file
View File

@@ -0,0 +1,138 @@
# Memex Chat — Obsidian Plugin
Chat with your Obsidian vault using Claude AI. Ask questions about your notes, get context-aware answers, and explore semantic connections — all without leaving Obsidian.
## Features
- **Vault search** — TF-IDF index by default; enable local embeddings for hybrid mode (TF-IDF + semantic merged via RRF), fully offline after first model download
- **Related notes sidebar** — panel showing the most similar notes to whatever you have open, ranked by semantic similarity + frontmatter links + shared tags
- **Auto context** — relevant notes are automatically found and sent to Claude as context
- **Context preview** — see which notes are included before sending, or dismiss to send without context
- **`@mention` autocomplete** — pin specific notes into context directly from the input field
- **Thread history** — chats saved as Markdown in your vault (default: `Calendar/Chat/`)
- **Source links** — every answer shows which notes were used as context
- **Prompt buttons** — header mode buttons that extend Claude's system prompt (e.g. draft check, monthly review)
## Installation
1. Download `main.js`, `manifest.json`, `styles.css` from the [latest release](../../releases/latest)
2. Copy into `.obsidian/plugins/memex-chat/` in your vault
3. Enable in **Settings → Community Plugins → Memex Chat**
4. Add your [Anthropic API key](https://console.anthropic.com/) in plugin settings
## Build from Source
```bash
npm install
npm run build
```
Requires Node 18+.
## Usage
### Basic workflow
1. Open the chat panel via the ribbon icon or the **Memex Chat öffnen** command
2. Type your question and press **Cmd+Enter** (or Enter if configured)
3. If **Auto retrieve context** is on, relevant notes are found automatically and shown in a preview
4. Confirm or dismiss the context, then your message is sent to Claude with the note content injected
### @mentions
Type `@` followed by at least 2 characters to trigger autocomplete for note names. Selecting a note adds it to the explicit context for that message, regardless of search results.
### Context preview
When **Show context preview** is on, a list of notes appears above the input before each send. You can dismiss it to send without context, or confirm to proceed. The notes used are shown as source links below the assistant's reply.
### Thread management
- Threads are listed in the sidebar of the chat panel under **Verlauf**
- Double-click a thread title to rename it inline
- Each thread is saved as a Markdown file in your configured threads folder, with a frontmatter `id:` field used for deduplication on re-import
- Vault chat files not already in active threads are loaded on demand from the sidebar
### Message actions
Hover over a finished assistant message to reveal two actions:
- **Copy** — copies the message text to the clipboard
- **Als Notiz** — saves the message as a new note in Obsidian's default new-note location
### Prompt buttons
Header buttons that activate a mode by extending Claude's system prompt with the contents of a vault note. Multiple buttons can be active at once.
When a button is active:
- The file at its configured vault path is appended to the system prompt
- An optional hint is shown above the input
- If `searchMode: "date"` is set, context retrieval switches to date-based file lookup (useful for monthly review modes)
- Auto context retrieval is skipped
Configure prompt buttons in **Settings → Prompt Buttons**.
### System context file
In settings you can specify a vault note to always append to the system prompt (after the base prompt, before any active prompt buttons). Useful for personal context like your name, current projects, or standing instructions.
## Commands
| Command | Description |
|---|---|
| `Memex Chat öffnen` | Open the chat panel |
| `Verwandte Notizen` | Open the related notes sidebar |
| `Memex Chat: Index neu aufbauen` | Rebuild the search index |
| `Memex Chat: Aktive Notiz als Kontext` | Ask Claude about the currently open note |
## Related Notes Sidebar
Requires embeddings to be enabled. Opens in the right sidebar and automatically shows the top 10 most similar notes to the currently active file. Similarity is computed from:
1. **Semantic embedding similarity** (cosine similarity on 384-dim vectors)
2. **+0.15 boost** for notes linked via `contextProperties` frontmatter fields (e.g. `related: [[Note]]`)
3. **+0.05 per shared tag** (up to +0.15)
Notes boosted by a frontmatter link are marked with a **verknüpft** badge.
## Settings
### General
| Setting | Default | Description |
|---|---|---|
| API Key | — | Your Anthropic API key |
| Model | `claude-opus-4-6` | Which Claude model to use. Click **Aktualisieren** to fetch the live model list from the Anthropic API. |
| Max tokens | 8192 | Maximum output tokens per response |
| Max context notes | 6 | How many notes to retrieve per query |
| Max chars per note | 2500 | How much of each note to include |
| System prompt | (German default) | Base instructions sent to Claude on every request |
| System context file | — | Optional vault note appended to system prompt |
| Auto retrieve context | on | Automatically find relevant notes on send |
| Context preview | on | Show context before sending |
| Save threads to vault | on | Persist chats as Markdown files |
| Threads folder | `Calendar/Chat` | Where to save thread files |
| Send on Enter | off | Enter sends (vs. Cmd+Enter) |
| Context properties | `collection, related, up, tags` | Frontmatter properties whose wikilink values boost search ranking |
### Embeddings (optional)
| Setting | Default | Description |
|---|---|---|
| Use embeddings | off | Enable hybrid search (TF-IDF + semantic, merged via RRF) |
| Embedding model | BGE Micro v2 | ONNX model for local inference |
| Exclude folders | — | Vault folders skipped during embedding |
| Model | Notes |
|---|---|
| `TaylorAI/bge-micro-v2` | Default — fastest, 384-dim |
| `Xenova/all-MiniLM-L6-v2` | 384-dim |
| `Xenova/multilingual-e5-small` | German + English |
| `Xenova/paraphrase-multilingual-MiniLM-L12-v2` | German + English, larger |
Embeddings are computed locally (no API call) and cached in `<vault>/.memex-chat/embeddings/`. The model (~22 MB) is downloaded once to `<vault>/.memex-chat/models/`. Indexing progress is shown as an Obsidian notice. Obsidian Sync activity is detected automatically — indexing waits until sync is idle before starting.
Once indexing completes, context retrieval switches to **hybrid mode**: TF-IDF and semantic results are fetched independently then rank-merged via Reciprocal Rank Fusion. Notes that score well in both engines rise to the top; notes found by only one are still included if their rank is strong enough. This catches paraphrased queries that TF-IDF misses and avoids the over-broadness of embeddings alone.
## License
MIT

View File

@@ -0,0 +1,173 @@
# Fetch Models Implementation Plan
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** Add an "Aktualisieren" button next to the model dropdown in settings that fetches the 3 newest Claude models from the Anthropic API and repopulates the dropdown.
**Architecture:** `ClaudeClient` gains a `fetchModels()` method (reusing existing `requestUrl`/`headers` patterns). `SettingsTab` captures the `DropdownComponent` and `ButtonComponent` references, wires the button to call `fetchModels`, and rebuilds the dropdown on success.
**Tech Stack:** TypeScript, Obsidian plugin API (`requestUrl`, `Setting`, `DropdownComponent`, `ButtonComponent`, `Notice`), Anthropic Models API (`GET /v1/models`)
---
## File Map
| File | Change |
|---|---|
| `src/ClaudeClient.ts` | Add `fetchModels(apiKey)` method |
| `src/SettingsTab.ts` | Update imports; refactor "Modell" `Setting` to capture dropdown + add button |
---
### Task 1: Add `fetchModels` to `ClaudeClient`
**Files:**
- Modify: `src/ClaudeClient.ts`
- [ ] **Step 1: Add the method after the `chat()` method**
In `src/ClaudeClient.ts`, add after line 86 (after `chat()`'s closing brace):
```typescript
/** Fetch the 3 newest Claude models from the Anthropic Models API. */
async fetchModels(apiKey: string): Promise<{ id: string; name: string }[]> {
const response = await requestUrl({
url: "https://api.anthropic.com/v1/models",
method: "GET",
headers: this.headers(apiKey),
throw: false,
});
if (response.status >= 400) {
throw new Error(`API Error ${response.status}: ${response.text}`);
}
const data: { id: string; created: number }[] = response.json.data ?? [];
if (data.length === 0) {
throw new Error("No models returned");
}
return data
.sort((a, b) => b.created - a.created)
.slice(0, 3)
.map((m) => ({ id: m.id, name: m.id }));
}
```
- [ ] **Step 2: Verify build passes**
```bash
npm run build
```
Expected: no TypeScript errors, `main.js` written successfully.
- [ ] **Step 3: Commit**
```bash
git add src/ClaudeClient.ts
git commit -m "feat: add fetchModels to ClaudeClient"
```
---
### Task 2: Update SettingsTab — imports and model setting
**Files:**
- Modify: `src/SettingsTab.ts:1` (import line)
- Modify: `src/SettingsTab.ts:151-160` (the "Modell" Setting block)
- [ ] **Step 1: Extend the import from `"obsidian"`**
Replace the current import line 1:
```typescript
import { App, PluginSettingTab, Setting } from "obsidian";
```
With:
```typescript
import { App, ButtonComponent, DropdownComponent, Notice, PluginSettingTab, Setting } from "obsidian";
```
- [ ] **Step 2: Replace the "Modell" Setting block**
Replace lines 151–160:
```typescript
new Setting(containerEl)
.setName("Modell")
.setDesc("Welches Claude-Modell verwenden?")
.addDropdown((drop) => {
for (const m of MODELS) drop.addOption(m.id, m.name);
drop.setValue(this.plugin.settings.model).onChange(async (value) => {
this.plugin.settings.model = value;
await this.plugin.saveSettings();
});
});
```
With:
```typescript
let modelDrop: DropdownComponent;
let refreshBtn: ButtonComponent;
new Setting(containerEl)
.setName("Modell")
.setDesc("Welches Claude-Modell verwenden?")
.addDropdown((drop) => {
modelDrop = drop;
for (const m of MODELS) drop.addOption(m.id, m.name);
drop.setValue(this.plugin.settings.model).onChange(async (value) => {
this.plugin.settings.model = value;
await this.plugin.saveSettings();
});
})
.addButton((btn) => {
refreshBtn = btn;
btn.setButtonText("Aktualisieren").onClick(async () => {
const prev = modelDrop.getValue();
refreshBtn.setDisabled(true);
refreshBtn.setButtonText("...");
try {
const models = await this.plugin.claude.fetchModels(this.plugin.settings.apiKey);
modelDrop.selectEl.empty();
for (const m of models) modelDrop.addOption(m.id, m.name);
modelDrop.setValue(models.some((m) => m.id === prev) ? prev : models[0].id);
this.plugin.settings.model = modelDrop.getValue();
await this.plugin.saveSettings();
} catch (err) {
new Notice("Modelle konnten nicht geladen werden: " + (err as Error).message);
} finally {
refreshBtn.setDisabled(false);
refreshBtn.setButtonText("Aktualisieren");
}
});
});
```
- [ ] **Step 3: Verify build passes**
```bash
npm run build
```
Expected: no TypeScript errors, `main.js` written successfully.
- [ ] **Step 4: Manual smoke test in Obsidian**
1. Copy `main.js`, `manifest.json`, `styles.css` to `.obsidian/plugins/memex-chat/` in your vault
2. Reload the plugin (or restart Obsidian)
3. Open Settings → Memex Chat
4. Confirm the "Modell" row has a dropdown and an "Aktualisieren" button
5. With a valid API key set, click "Aktualisieren" — button should show "...", then restore; dropdown should show 3 model IDs
6. With no API key, click "Aktualisieren" — a Notice should appear with an error message; dropdown should be unchanged
- [ ] **Step 5: Commit**
```bash
git add src/SettingsTab.ts
git commit -m "feat: add Aktualisieren button to fetch models from API"
```

View File

@@ -0,0 +1,89 @@
# Fetch Models Design
**Date:** 2026-03-27
**Status:** Approved
## Summary
Add an "Aktualisieren" button to the Model setting in the settings tab. When clicked, it fetches the 3 newest Claude models from the Anthropic Models API and updates the dropdown. Falls back to the hardcoded `MODELS` list if the request fails or returns no models.
## ClaudeClient changes
Add a new method `fetchModels(apiKey: string): Promise<{id: string, name: string}[]>` to `ClaudeClient`.
- URL: `"https://api.anthropic.com/v1/models"` — inline string or a separate private constant; **do not use or modify `baseUrl`** (which points to `/v1/messages`)
- Call `requestUrl` with `throw: false` (same pattern as `chat`) and `this.headers(apiKey)`
- Response shape: `{ data: [{ id: string, created: number, display_name: string, ... }] }`
- Throw on `response.status >= 400` with the response text
- If `data` is empty, throw an error ("No models returned") — do not return an empty array
- Sort `data` descending by `created`, take top 3
- Return `{ id, name: id }` for each — use `id` as the display name (not `display_name`). Note: fetched entries will show raw IDs (e.g. `claude-opus-4-6`) while hardcoded `MODELS` show human-friendly names (e.g. `"Claude Opus 4.6 (Stärkste)"`). This is intentional — keeps the implementation simple and avoids relying on API-provided display strings.
## SettingsTab changes
**Import addition:** Add `Notice, ButtonComponent, DropdownComponent` to the `import { ... } from "obsidian"` line.
Convert the existing "Modell" `Setting` to capture both the `DropdownComponent` and `ButtonComponent` references by chaining `addDropdown()` and `addButton()` on the same `Setting` instance:
```typescript
let modelDrop: DropdownComponent;
let refreshBtn: ButtonComponent;
new Setting(containerEl)
.setName("Modell")
.setDesc("Welches Claude-Modell verwenden?")
.addDropdown((drop) => {
modelDrop = drop;
for (const m of MODELS) drop.addOption(m.id, m.name);
drop.setValue(this.plugin.settings.model).onChange(async (value) => {
this.plugin.settings.model = value;
await this.plugin.saveSettings();
});
})
.addButton((btn) => {
refreshBtn = btn;
btn.setButtonText("Aktualisieren").onClick(async () => { /* see click flow */ });
});
```
**Click flow:**
1. Capture current value: `const prev = modelDrop.getValue()`
2. `refreshBtn.setDisabled(true)` and `refreshBtn.setButtonText("...")`
3. In a try/catch/finally:
- **try:** Call `this.plugin.claude.fetchModels(this.plugin.settings.apiKey)`
- On success: clear dropdown with `modelDrop.selectEl.empty()`, repopulate via `modelDrop.addOption(id, name)` for each fetched model, then set value to `prev` if it exists among the fetched ids, otherwise the first fetched id; save via `this.plugin.settings.model = modelDrop.getValue(); await this.plugin.saveSettings()`
- **catch:** `new Notice("Modelle konnten nicht geladen werden: " + err.message)` — dropdown is **not** modified on error (hardcoded options remain)
- **finally:** `refreshBtn.setDisabled(false)` and `refreshBtn.setButtonText("Aktualisieren")`
**Fallback:** The hardcoded `MODELS` array in `SettingsTab.ts` is unchanged and remains the initial population of the dropdown on every settings open.
## Data flow
```
[Aktualisieren button click]
→ capture prev = modelDrop.getValue()
→ disable button, show "..."
→ this.plugin.claude.fetchModels(apiKey) [throw: false, separate URL]
→ throw if status >= 400 or data empty
→ sort by created desc, take 3
→ return [{id, name: id}]
→ clear selectEl, repopulate, restore selection
→ save model to settings
→ finally: restore button
```
## Error handling
| Scenario | Behaviour |
|---|---|
| No API key (401) | Notice shown; dropdown unchanged |
| Network failure | Notice shown; dropdown unchanged |
| Empty `data` array | Treated as error; Notice shown; dropdown unchanged |
| Fewer than 3 models returned | Take all returned (no error) |
## Out of scope
- Persisting fetched models across restarts
- Auto-fetching on settings open or plugin startup
- Configurable count of models to show
- Updating `DEFAULT_SETTINGS.model` after a fetch

97
esbuild.config.mjs Normal file
View File

@@ -0,0 +1,97 @@
import esbuild from "esbuild";
import process from "process";
import builtins from "builtin-modules";
import { readFile } from "fs/promises";
// "production" CLI arg (npm run build) selects a one-shot build with no sourcemaps;
// anything else runs watch mode with inline sourcemaps (see context options below).
const prod = process.argv[2] === "production";
// @xenova/transformers requires several native modules unconditionally even though
// the Electron renderer only uses the WASM (onnxruntime-web) path.
// Stub them out so the require() calls don't throw at runtime.
const stubNativeModules = {
name: "stub-native-modules",
setup(build) {
const stubs = /^(onnxruntime-node|sharp|canvas)$/;
build.onResolve({ filter: stubs }, (args) => ({
path: args.path,
namespace: "stub",
}));
build.onLoad({ filter: /.*/, namespace: "stub" }, () => ({
contents: "module.exports = {};",
loader: "js",
}));
},
};
// In Electron's renderer process, process.release.name === "node" is true, which
// causes @xenova/transformers to select the onnxruntime-node backend (our stub).
// Patch onnx.js at build time to always use the WASM/web backend instead.
const forceOnnxWeb = {
name: "force-onnx-web",
setup(build) {
build.onLoad({ filter: /backends\/onnx\.js$/ }, async (args) => {
let source = await readFile(args.path, "utf8");
source = source.replace(
"process?.release?.name === 'node'",
"false"
);
return { contents: source, loader: "js" };
});
},
};
// ort-web detects Node.js via process.versions.node and uses threaded WASM + worker_threads,
// which fails in Electron's renderer. Force browser mode so it uses non-threaded WASM instead.
const forceOrtWebBrowserMode = {
name: "force-ort-web-browser-mode",
setup(build) {
build.onLoad({ filter: /ort-web\.min\.js$/ }, async (args) => {
let source = await readFile(args.path, "utf8");
// Replace all three occurrences of the Node.js version check
source = source.replaceAll(
'"string"==typeof process.versions.node',
"false"
);
return { contents: source, loader: "js" };
});
},
};
// Build context: bundles src/main.ts (through the three patch plugins above) into main.js.
const context = await esbuild.context({
  entryPoints: ["src/main.ts"],
  bundle: true,
  plugins: [stubNativeModules, forceOnnxWeb, forceOrtWebBrowserMode],
  // Everything below is provided by the Obsidian/Electron runtime at load time —
  // it must stay external and never be bundled.
  external: [
    "obsidian",
    "electron",
    "@codemirror/autocomplete",
    "@codemirror/collab",
    "@codemirror/commands",
    "@codemirror/language",
    "@codemirror/lint",
    "@codemirror/search",
    "@codemirror/state",
    "@codemirror/view",
    "@lezer/common",
    "@lezer/highlight",
    "@lezer/lr",
    ...builtins,
  ],
  format: "cjs",
  target: "es2020", // raised from es2018 to support BigInt used by @xenova/transformers
  // @xenova/transformers uses import.meta.url to locate its own files.
  // In CJS output, esbuild stubs import.meta as {}, making fileURLToPath(undefined) throw.
  // Provide a valid dummy URL so init_env() can complete and env.backends is populated.
  define: { "import.meta.url": '"file:///obsidian-bundle.js"' },
  logLevel: "info",
  sourcemap: prod ? false : "inline", // inline sourcemaps for dev only
  treeShaking: true,
  outfile: "main.js",
});
if (prod) {
  // One-shot production build, then exit.
  await context.rebuild();
  process.exit(0);
} else {
  // Dev mode: stay alive and rebuild on every source change.
  await context.watch();
}

33665
main.js Normal file

File diff suppressed because one or more lines are too long

10
manifest.json Normal file
View File

@@ -0,0 +1,10 @@
{
"id": "memex-chat",
"name": "Memex Chat",
"version": "1.0.3",
"minAppVersion": "1.4.0",
"description": "Chat with your Obsidian vault using Claude AI — semantic context retrieval, @ mentions, thread history.",
"author": "Sven",
"authorUrl": "https://github.com/svemagie",
"isDesktopOnly": false
}

1603
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

28
package.json Normal file
View File

@@ -0,0 +1,28 @@
{
"name": "memex-chat",
"version": "1.0.3",
"description": "Obsidian plugin: Chat with your vault using Claude AI",
"author": "Sven",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/svemagie/memex-chat.git"
},
"bugs": {
"url": "https://github.com/svemagie/memex-chat/issues"
},
"homepage": "https://github.com/svemagie/memex-chat#readme",
"main": "main.js",
"scripts": {
"build": "node esbuild.config.mjs production",
"dev": "node esbuild.config.mjs"
},
"devDependencies": {
"@types/node": "^20.0.0",
"@xenova/transformers": "^2.17.2",
"builtin-modules": "^3.3.0",
"esbuild": "^0.25.0",
"obsidian": "latest",
"typescript": "^5.0.0"
}
}

1108
src/ChatView.ts Normal file

File diff suppressed because it is too large Load Diff

164
src/ClaudeClient.ts Normal file
View File

@@ -0,0 +1,164 @@
import { requestUrl } from "obsidian";
import * as https from "https";
/** A single conversation turn sent to the Anthropic Messages API. */
export interface ClaudeMessage {
  role: "user" | "assistant";
  content: string;
}
/** Per-request configuration passed to ClaudeClient calls. */
export interface ClaudeOptions {
  apiKey: string;
  model: string;
  // Max output tokens; defaults to 8192 when omitted (see streamChat's request body).
  maxTokens?: number;
  systemPrompt: string;
}
export interface ClaudeStreamChunk {
type: "text" | "done" | "error";
text?: string;
error?: string;
}
/**
 * Minimal Anthropic (Claude) API client.
 * streamChat streams via Node https + SSE; chat/fetchModels use Obsidian's requestUrl.
 */
export class ClaudeClient {
  private baseUrl = "https://api.anthropic.com/v1/messages";

  /** Common request headers for every Anthropic API call. */
  private headers(apiKey: string): Record<string, string> {
    return {
      "content-type": "application/json",
      "x-api-key": apiKey,
      "anthropic-version": "2023-06-01",
    };
  }

  /**
   * Stream a chat completion via Node.js https + SSE, yielding text chunks as they arrive.
   * Uses the Node.js https module (available in Obsidian's Electron renderer via Node integration)
   * to bypass Electron's CORS/CSP restrictions that block fetch and XHR to external APIs.
   *
   * @param messages conversation turns (user/assistant)
   * @param options  API key, model, optional token cap, system prompt
   * @yields zero or more {type:"text"} chunks, then exactly one {type:"done"};
   *         a {type:"error"} chunk on HTTP/network failure.
   */
  async *streamChat(
    messages: ClaudeMessage[],
    options: ClaudeOptions
  ): AsyncGenerator<ClaudeStreamChunk> {
    // Producer/consumer pump: https callbacks push chunks into `queue`;
    // the drain loop at the bottom sleeps on `wakeup` while the queue is empty.
    const queue: ClaudeStreamChunk[] = [];
    let done = false;
    let wakeup: (() => void) | null = null;
    const push = (c: ClaudeStreamChunk) => { queue.push(c); wakeup?.(); wakeup = null; };
    const finish = () => { done = true; wakeup?.(); wakeup = null; };
    const body = JSON.stringify({
      model: options.model,
      max_tokens: options.maxTokens ?? 8192, // default cap when caller omits maxTokens
      system: options.systemPrompt,
      messages,
      stream: true,
    });
    const req = https.request(
      {
        hostname: "api.anthropic.com",
        path: "/v1/messages",
        method: "POST",
        headers: {
          ...this.headers(options.apiKey),
          "content-length": Buffer.byteLength(body).toString(),
        },
      },
      (res) => {
        // HTTP-level failure: collect the whole error body, surface it as one error chunk.
        if ((res.statusCode ?? 0) >= 400) {
          let errBody = "";
          res.on("data", (d: Buffer) => errBody += d.toString());
          res.on("end", () => { push({ type: "error", error: `API Error ${res.statusCode}: ${errBody}` }); finish(); });
          return;
        }
        // SSE framing: events arrive as "data: {...}\n" lines, possibly split across TCP chunks.
        let buf = "";
        res.on("data", (chunk: Buffer) => {
          buf += chunk.toString();
          const lines = buf.split("\n");
          buf = lines.pop() ?? ""; // keep partial last line
          for (const line of lines) {
            if (!line.startsWith("data: ")) continue;
            const data = line.slice(6).trim();
            // NOTE(review): "[DONE]" is an OpenAI-style sentinel; Anthropic signals the end
            // via a message_stop event instead, so this branch is likely never hit — confirm.
            if (data === "[DONE]") return;
            try {
              const ev = JSON.parse(data);
              if (ev.type === "content_block_delta" && ev.delta?.type === "text_delta") {
                push({ type: "text", text: ev.delta.text });
              }
            } catch { /* skip malformed lines */ }
          }
        });
        res.on("end", () => { finish(); });
        res.on("error", (e: Error) => { push({ type: "error", error: e.message }); finish(); });
      }
    );
    req.on("error", (e: Error) => { push({ type: "error", error: e.message }); finish(); });
    req.write(body);
    req.end();
    // Drain loop: yield everything queued, then await the next wakeup until finished.
    while (true) {
      while (queue.length) yield queue.shift()!;
      if (done) break;
      await new Promise<void>(r => { wakeup = r; });
    }
    while (queue.length) yield queue.shift()!;
    yield { type: "done" }; // always terminate with a done marker (even after errors)
  }

  /**
   * Non-streaming convenience wrapper around the Messages API.
   * @returns the text of the first content block ("" if absent)
   * @throws Error with status + body text on HTTP >= 400
   */
  async chat(messages: ClaudeMessage[], options: ClaudeOptions): Promise<string> {
    const response = await requestUrl({
      url: this.baseUrl,
      method: "POST",
      headers: this.headers(options.apiKey),
      body: JSON.stringify({
        model: options.model,
        max_tokens: options.maxTokens ?? 8192,
        system: options.systemPrompt,
        messages,
      }),
      throw: false, // handle HTTP errors ourselves below
    });
    if (response.status >= 400) {
      throw new Error(`API Error ${response.status}: ${response.text}`);
    }
    return response.json.content?.[0]?.text ?? "";
  }

  /**
   * Fetch Claude models from the Anthropic Models API.
   * Returns the 2 newest versions of each family (opus, sonnet, haiku), in that order.
   * @throws Error on HTTP >= 400 or when the API returns an empty model list
   */
  async fetchModels(apiKey: string): Promise<{ id: string; name: string }[]> {
    const response = await requestUrl({
      url: "https://api.anthropic.com/v1/models",
      method: "GET",
      headers: this.headers(apiKey),
      throw: false,
    });
    if (response.status >= 400) {
      throw new Error(`API Error ${response.status}: ${response.text}`);
    }
    const data: { id: string; created: number }[] = response.json.data ?? [];
    if (data.length === 0) {
      throw new Error("No models returned");
    }
    // Newest first, then take the top 2 whose ID contains each family name.
    const sorted = data.sort((a, b) => b.created - a.created);
    const families = ["opus", "sonnet", "haiku"] as const;
    return families.flatMap((family) =>
      sorted
        .filter((m) => m.id.includes(family))
        .slice(0, 2)
        .map((m) => ({ id: m.id, name: m.id }))
    );
  }
}

441
src/EmbedSearch.ts Normal file
View File

@@ -0,0 +1,441 @@
import { App, TFile } from "obsidian";
import { promises as fsp } from "fs";
import { join, relative, dirname } from "path";
import type { SearchResult } from "./VaultSearch";
/** Embedding models selectable in settings: HuggingFace model ID + display label. */
export const EMBEDDING_MODELS = [
  { id: "TaylorAI/bge-micro-v2", name: "BGE Micro v2 (schnell, 384-dim, empfohlen)" },
  { id: "Xenova/all-MiniLM-L6-v2", name: "MiniLM L6 v2 (384-dim)" },
  { id: "Xenova/multilingual-e5-small", name: "Multilingual E5 Small (mehrsprachig, DE/EN)" },
  { id: "Xenova/paraphrase-multilingual-MiniLM-L12-v2", name: "Multilingual MiniLM L12 (mehrsprachig)" },
];
/** On-disk cache entry for one note (contents of its .ajson file): note mtime + embedding vector. */
interface EmbedCacheEntry { mtime: number; vec: number[] }
/** Contents of embeddings/.manifest.json — identifies which model produced the cached vectors. */
interface Manifest { model: string; version: number }
/**
* Semantic search engine using Transformers.js for local embeddings.
*
* All data lives under <vault>/.memex-chat/:
* models/ — downloaded ONNX model files (via env.cacheDir)
* embeddings/.manifest.json — model name + version
* embeddings/some/note.ajson — { mtime, vec }
*
* WASM runtime is loaded from CDN (cdn.jsdelivr.net) on first use.
*/
export class EmbedSearch {
  private app: App;
  private modelId: string; // HuggingFace model ID (see EMBEDDING_MODELS)
  excludeFolders: string[] = []; // vault folder prefixes to skip
  contextProperties: string[] = []; // frontmatter keys whose links get a score boost
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  private pipe: ((text: string, opts: object) => Promise<{ data: Float32Array }>) | null = null;
  private cache: Map<string, EmbedCacheEntry> = new Map(); // vaultPath → entry
  private vecs: Map<string, { vec: number[]; file: TFile }> = new Map(); // in-memory search index
  private indexed = false; // set once buildIndex() completes
  private indexing = false; // re-entrancy guard for buildIndex()
  /** Called every ~5 notes during indexing. speed = newly embedded notes/sec (cached notes excluded). */
  onProgress?: (done: number, total: number, speed: number) => void;
  /** Called during model/WASM download with a human-readable status string. */
  onModelStatus?: (status: string) => void;

  constructor(app: App, modelId: string) {
    this.app = app;
    this.modelId = modelId;
  }

  isIndexed(): boolean { return this.indexed; }

  // ─── Paths ───────────────────────────────────────────────────────────────

  /**
   * Absolute filesystem path of the vault root.
   * NOTE(review): assumes a desktop FileSystemAdapter exposing `basePath`;
   * mobile adapters may not have it — confirm against manifest's isDesktopOnly.
   */
  private get vaultRoot(): string {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return (this.app.vault.adapter as any).basePath as string;
  }

  /** <vault>/.memex-chat — root for all plugin-managed files. */
  private get baseDir(): string {
    return join(this.vaultRoot, ".memex-chat");
  }

  /** Download cache for ONNX model files (env.cacheDir). */
  private get modelsDir(): string {
    return join(this.baseDir, "models");
  }

  /** Per-note embedding files mirror the vault folder structure here. */
  private get embedDir(): string {
    return join(this.baseDir, "embeddings");
  }

  private get manifestPath(): string {
    return join(this.embedDir, ".manifest.json");
  }

  /** Disk path for the embedding of a vault-relative note path (e.g. "folder/note.md") */
  private noteEmbedPath(vaultPath: string): string {
    return join(this.embedDir, vaultPath.replace(/\.md$/, ".ajson"));
  }

  // ─── Pipeline ────────────────────────────────────────────────────────────

  /**
   * Lazily create the Transformers.js feature-extraction pipeline (idempotent).
   * Configures WASM from CDN, single-threaded inline execution, and an
   * on-disk model cache inside the vault before instantiating the pipeline.
   */
  private async loadPipeline(): Promise<void> {
    if (this.pipe) return;
    // Use require() — reliable in CJS bundle; still lazy since we're inside an async function.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const { pipeline, env } = require("@xenova/transformers") as any;
    env.backends.onnx.wasm.wasmPaths =
      "https://cdn.jsdelivr.net/npm/onnxruntime-web@1.14.0/dist/";
    env.backends.onnx.wasm.proxy = false; // proxy Worker hangs in Obsidian; run inline instead
    env.backends.onnx.wasm.numThreads = 1;
    env.allowLocalModels = false;
    env.allowRemoteModels = true;
    env.useBrowserCache = false;
    env.useFSCache = true;
    env.cacheDir = this.modelsDir; // store downloaded models in vault's .memex-chat/models/
    // Forward download progress to the UI via onModelStatus (if set).
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const progress_callback = (p: any) => {
      if (!this.onModelStatus) return;
      if (p.status === "initiate") {
        this.onModelStatus(`Lade Modell: ${p.name ?? p.file ?? ""}`);
      } else if (p.status === "download") {
        const pct = p.progress != null ? ` ${Math.round(p.progress)}%` : "";
        const mb = p.total ? ` (${(p.total / 1e6).toFixed(1)} MB)` : "";
        this.onModelStatus(`Download${pct}${mb}: ${p.file ?? ""}`);
      } else if (p.status === "ready") {
        this.onModelStatus("Modell bereit");
      }
    };
    this.pipe = await pipeline("feature-extraction", this.modelId, {
      quantized: true,
      progress_callback,
    });
  }

  /**
   * Embed a text snippet (truncated to 512 chars) with mean pooling and
   * L2 normalisation, returning a plain number[] vector.
   */
  private async embed(text: string): Promise<number[]> {
    await this.loadPipeline();
    const result = await this.pipe!(text.slice(0, 512), { pooling: "mean", normalize: true });
    return Array.from(result.data);
  }

  /**
   * embed() with a hard timeout; rejects with "embed timeout" if exceeded.
   * NOTE(review): the timeout only rejects the promise — the synchronous WASM
   * inference itself is not cancelled and may still run to completion.
   */
  private embedWithTimeout(text: string, ms = 13000): Promise<number[]> {
    return Promise.race([
      this.embed(text),
      new Promise<number[]>((_, reject) =>
        setTimeout(() => reject(new Error("embed timeout")), ms)
      ),
    ]);
  }

  /**
   * Similarity between two vectors. Plain dot product suffices because embed()
   * normalises vectors (normalize: true), so dot product == cosine similarity.
   */
  private cosine(a: number[], b: number[]): number {
    let dot = 0;
    for (let i = 0; i < a.length; i++) dot += a[i] * b[i];
    return dot;
  }

  // ─── Index ────────────────────────────────────────────────────────────────

  /**
   * Build (or incrementally refresh) the full embedding index:
   * load the on-disk cache, re-embed notes whose mtime changed, persist new
   * vectors in batches of 100, prune stale entries, and report progress.
   * Re-entrant calls while a build is running are ignored.
   */
  async buildIndex(): Promise<void> {
    if (this.indexing) return;
    this.indexing = true;
    this.indexed = false;
    this.vecs.clear();
    const changed: string[] = []; // vault paths newly embedded this run
    let pipelineError: unknown = null;
    // Create directories unconditionally — independent of pipeline success
    try {
      await fsp.mkdir(this.modelsDir, { recursive: true });
      await fsp.mkdir(this.embedDir, { recursive: true });
    } catch (e) {
      console.error("[Memex] Verzeichnisse konnten nicht angelegt werden:", e);
    }
    try {
      await this.loadCache();
      const allFiles = this.app.vault.getMarkdownFiles();
      // Honour the exclude list (prefix match on folder paths).
      const files = this.excludeFolders.length
        ? allFiles.filter((f) => !this.excludeFolders.some((ex) => f.path.startsWith(ex + "/")))
        : allFiles;
      const total = files.length;
      let done = 0;
      // Sliding window for the notes/sec estimate reported via onProgress.
      let windowStart = Date.now();
      let windowEmbedded = 0;
      let speed = 0;
      for (const file of files) {
        const mtime = file.stat.mtime;
        const cached = this.cache.get(file.path);
        if (cached && cached.mtime === mtime) {
          // Unchanged since last run — reuse the cached vector.
          this.vecs.set(file.path, { vec: cached.vec, file });
        } else {
          try {
            // Yield before each inference so Obsidian's event loop can process events
            // (WASM inference is synchronous and blocks the main thread briefly per note)
            await new Promise((r) => setTimeout(r, 0));
            const raw = await this.app.vault.cachedRead(file);
            const text = this.preprocess(raw).slice(0, 800) + " " + file.basename;
            // First call initialises WASM + loads model — allow extra time
            const vec = await this.embedWithTimeout(text, this.pipe ? 13000 : 120000);
            this.cache.set(file.path, { mtime, vec });
            this.vecs.set(file.path, { vec, file });
            changed.push(file.path);
            windowEmbedded++;
            // Flush newly embedded notes to disk every 100 to preserve progress
            if (changed.length % 100 === 0) await this.flushBatch(changed.slice(-100));
          } catch (e) {
            if (!this.pipe && !pipelineError) {
              // Pipeline failed to load — log once and abort embedding loop
              pipelineError = e;
              console.error("[Memex] Pipeline-Ladefehler:", e);
              break;
            }
            console.warn("[Memex] Datei übersprungen:", file.path, e);
            // skip individual file
          }
        }
        done++;
        if (this.onProgress && done % 5 === 0) {
          const elapsed = (Date.now() - windowStart) / 1000;
          if (elapsed > 0 && windowEmbedded > 0) {
            speed = windowEmbedded / elapsed;
            // Restart the measurement window after 25 fresh embeds for a recent estimate.
            if (windowEmbedded >= 25) { windowStart = Date.now(); windowEmbedded = 0; }
          }
          this.onProgress(done, total, speed);
        }
      }
      if (pipelineError) throw pipelineError;
      const allPaths = new Set(files.map((f) => f.path));
      // Flush remainder (notes not yet flushed by the every-100 batches)
      const remainder = changed.length % 100;
      await this.saveCache(remainder > 0 ? changed.slice(-remainder) : [], allPaths);
      this.indexed = true;
      if (this.onProgress) this.onProgress(total, total, speed);
    } catch (e) {
      console.error("[Memex] buildIndex Fehler:", e);
    } finally {
      this.indexing = false;
    }
  }

  // ─── Incremental re-embed on file change ─────────────────────────────────

  // Per-file debounce timers for reembedFile().
  private reembedTimers: Map<string, ReturnType<typeof setTimeout>> = new Map();

  /**
   * Debounced re-embed for a single file (called on vault modify events).
   * Waits 2 s after the last write before embedding.
   */
  reembedFile(file: TFile): void {
    // Only after initial indexing, and never concurrently with a full rebuild.
    if (!this.indexed || this.indexing) return;
    const existing = this.reembedTimers.get(file.path);
    if (existing) clearTimeout(existing);
    const timer = setTimeout(async () => {
      this.reembedTimers.delete(file.path);
      try {
        const raw = await this.app.vault.cachedRead(file);
        const text = this.preprocess(raw).slice(0, 800) + " " + file.basename;
        const vec = await this.embedWithTimeout(text);
        const mtime = file.stat.mtime;
        this.cache.set(file.path, { mtime, vec });
        this.vecs.set(file.path, { vec, file });
        await this.saveCache([file.path], new Set(this.vecs.keys()));
      } catch (e) {
        console.warn("[Memex] Re-embed fehlgeschlagen:", file.path, e);
      }
    }, 2000);
    this.reembedTimers.set(file.path, timer);
  }

  /**
   * Find notes similar to a given file using its cached vector (no re-embedding).
   * Scores get boosted for notes linked via contextProperty frontmatter fields
   * (+0.15) and for shared tags (+0.05 each, max 3), both capped at 1.0.
   */
  async searchSimilarToFile(file: TFile, topK = 10): Promise<SearchResult[]> {
    if (!this.indexed) return [];
    let qvec = this.vecs.get(file.path)?.vec;
    if (!qvec) {
      // File not yet indexed — embed on the fly
      try {
        const raw = await this.app.vault.cachedRead(file);
        const text = this.preprocess(raw).slice(0, 800) + " " + file.basename;
        qvec = await this.embedWithTimeout(text);
      } catch { return []; }
    }
    // Collect paths explicitly linked via contextProperty frontmatter fields
    const linkedPaths = new Set<string>();
    if (this.contextProperties.length > 0) {
      const meta = this.app.metadataCache.getFileCache(file);
      const links = meta?.frontmatterLinks ?? [];
      for (const link of links) {
        // link.key is e.g. "related.0" for array-valued properties — compare the root key.
        if (this.contextProperties.includes(link.key.split(".")[0])) {
          const resolved = this.app.metadataCache.getFirstLinkpathDest(link.link, file.path);
          if (resolved) linkedPaths.add(resolved.path);
        }
      }
    }
    // Collect tags of the current file
    const fileMeta = this.app.metadataCache.getFileCache(file);
    const fileTags = new Set<string>(
      (fileMeta?.tags ?? []).map((t) => t.tag.toLowerCase())
    );
    const scores: Array<[string, number]> = [];
    for (const [path, { vec }] of this.vecs) {
      if (path === file.path) continue; // never return the note itself
      let s = this.cosine(qvec, vec);
      if (s < 0.15) continue; // broader pre-filter to allow boosted notes through
      if (linkedPaths.has(path)) {
        s = Math.min(1.0, s + 0.15);
      }
      if (fileTags.size > 0) {
        const otherMeta = this.app.metadataCache.getFileCache(this.vecs.get(path)!.file);
        const otherTags = (otherMeta?.tags ?? []).map((t) => t.tag.toLowerCase());
        let sharedTags = 0;
        for (const tag of otherTags) {
          if (fileTags.has(tag)) sharedTags++;
          if (sharedTags >= 3) break; // cap tag boost at 3 shared tags
        }
        if (sharedTags > 0) s = Math.min(1.0, s + sharedTags * 0.05);
      }
      scores.push([path, s]);
    }
    scores.sort((a, b) => b[1] - a[1]);
    return scores.slice(0, topK).map(([path, score]) => {
      const { file: f } = this.vecs.get(path)!;
      return { file: f, score, excerpt: "", title: f.basename, linked: linkedPaths.has(path) };
    });
  }

  /**
   * Semantic free-text search over the whole index.
   * Builds the index first if needed; results below 0.2 similarity are dropped.
   */
  async search(query: string, topK = 8): Promise<SearchResult[]> {
    if (!this.indexed) await this.buildIndex();
    const qvec = await this.embed(query);
    const scores: Array<[string, number]> = [];
    for (const [path, { vec }] of this.vecs) {
      const s = this.cosine(qvec, vec);
      if (s > 0.2) scores.push([path, s]);
    }
    scores.sort((a, b) => b[1] - a[1]);
    return scores.slice(0, topK).map(([path, score]) => {
      const { file } = this.vecs.get(path)!;
      return { file, score, excerpt: "", title: file.basename };
    });
  }

  // ─── Cache I/O ───────────────────────────────────────────────────────────

  /**
   * Load all existing .ajson files from embedDir into this.cache.
   * If the manifest model doesn't match, skip loading (full rebuild).
   */
  private async loadCache(): Promise<void> {
    this.cache.clear();
    try {
      const manifestRaw = await fsp.readFile(this.manifestPath, "utf8");
      const manifest: Manifest = JSON.parse(manifestRaw);
      if (manifest.model !== this.modelId) return; // model changed — rebuild all
    } catch {
      return; // no manifest yet — start fresh
    }
    await this.loadCacheDir(this.embedDir);
  }

  /** Recursively read every .ajson file under `dir` into this.cache. */
  private async loadCacheDir(dir: string): Promise<void> {
    let entries;
    try { entries = await fsp.readdir(dir, { withFileTypes: true }); }
    catch { return; }
    for (const entry of entries) {
      if (entry.name.startsWith(".")) continue; // skip .manifest.json
      const fullPath = join(dir, entry.name);
      if (entry.isDirectory()) {
        await this.loadCacheDir(fullPath);
      } else if (entry.name.endsWith(".ajson")) {
        try {
          const raw = await fsp.readFile(fullPath, "utf8");
          const { mtime, vec }: EmbedCacheEntry = JSON.parse(raw);
          // Reconstruct vault path: relative path inside embedDir, swap .ajson → .md
          const rel = relative(this.embedDir, fullPath).replace(/\.ajson$/, ".md");
          // Normalise to forward slashes (vault paths always use /)
          const vaultPath = rel.split("\\").join("/");
          this.cache.set(vaultPath, { mtime, vec });
        } catch {
          // skip corrupt file
        }
      }
    }
  }

  /** Write .ajson files for a batch of vault paths (no pruning). Called incrementally. */
  private async flushBatch(vaultPaths: string[]): Promise<void> {
    try {
      // Rewrite the manifest on every flush so a crash mid-run still leaves it consistent.
      const manifest: Manifest = { model: this.modelId, version: 1 };
      await fsp.writeFile(this.manifestPath, JSON.stringify(manifest), "utf8");
      for (const vaultPath of vaultPaths) {
        const entry = this.cache.get(vaultPath);
        if (!entry) continue;
        const filePath = this.noteEmbedPath(vaultPath);
        await fsp.mkdir(dirname(filePath), { recursive: true });
        await fsp.writeFile(filePath, JSON.stringify({ mtime: entry.mtime, vec: entry.vec }), "utf8");
      }
    } catch (e) {
      console.error("[Memex] flushBatch Fehler:", e);
    }
  }

  /**
   * Final save: flush any remaining changed notes, then prune stale .ajson files.
   */
  private async saveCache(changed: string[], allVaultPaths: Set<string>): Promise<void> {
    if (changed.length > 0) await this.flushBatch(changed);
    await this.pruneStale(this.embedDir, allVaultPaths);
  }

  /** Recursively delete .ajson files whose corresponding note no longer exists. */
  private async pruneStale(dir: string, allVaultPaths: Set<string>): Promise<void> {
    let entries;
    try { entries = await fsp.readdir(dir, { withFileTypes: true }); }
    catch { return; }
    for (const entry of entries) {
      if (entry.name.startsWith(".")) continue;
      const fullPath = join(dir, entry.name);
      if (entry.isDirectory()) {
        await this.pruneStale(fullPath, allVaultPaths);
      } else if (entry.name.endsWith(".ajson")) {
        const rel = relative(this.embedDir, fullPath).replace(/\.ajson$/, ".md");
        const vaultPath = rel.split("\\").join("/");
        if (!allVaultPaths.has(vaultPath)) {
          await fsp.unlink(fullPath).catch(() => {});
        }
      }
    }
  }

  // ─── Text preprocessing ──────────────────────────────────────────────────

  /**
   * Strip markdown noise before embedding: YAML frontmatter, wikilink
   * brackets (keeping the alias or target), images, inline link syntax,
   * and heading markers.
   */
  private preprocess(raw: string): string {
    let c = raw;
    // Drop YAML frontmatter (--- ... ---) at the top of the note.
    if (c.startsWith("---")) {
      const end = c.indexOf("\n---", 3);
      if (end > 0) c = c.slice(end + 4);
    }
    c = c.replace(/\[\[([^\]|]+)(?:\|([^\]]+))?\]\]/g, (_, t, a) => a || t); // [[target|alias]] → alias or target
    c = c.replace(/!\[.*?\]\(.*?\)/g, ""); // remove image embeds
    c = c.replace(/\[([^\]]+)\]\(.*?\)/g, "$1"); // [text](url) → text
    c = c.replace(/^#{1,6}\s+/gm, ""); // strip heading markers
    return c;
  }
}

64
src/HybridSearch.ts Normal file
View File

@@ -0,0 +1,64 @@
import type { VaultSearch, SearchResult } from "./VaultSearch";
import type { EmbedSearch } from "./EmbedSearch";
// RRF smoothing constant — larger values flatten the rank contribution curve.
const RRF_K = 60;

/**
 * Fuses TF-IDF and embedding search with Reciprocal Rank Fusion (RRF).
 * Both engines run in parallel and are merged purely by rank, so the two
 * incompatible score spaces never need normalisation. TF-IDF excerpts are
 * carried through into the fused results.
 */
export class HybridSearch {
  constructor(
    private tfidf: VaultSearch,
    private embed: EmbedSearch
  ) {}

  /** Hybrid search is ready exactly when the embedding index is. */
  isIndexed(): boolean {
    return this.embed.isIndexed();
  }

  /**
   * Rank-merge both engines' results for `query`.
   * @param query free-text query
   * @param topK  number of fused results to return (default 8)
   */
  async search(query: string, topK = 8): Promise<SearchResult[]> {
    // Over-fetch from each engine so the fused top-K has enough candidates.
    const perEngine = topK * 3;
    const [lexical, semantic] = await Promise.all([
      this.tfidf.search(query, perEngine),
      this.embed.search(query, perEngine),
    ]);
    // path → partially fused entry; RRF contributions accumulate in `score`.
    type Fused = { lex?: SearchResult; sem?: SearchResult; score: number };
    const fused = new Map<string, Fused>();
    lexical.forEach((hit, rank) => {
      fused.set(hit.file.path, { lex: hit, score: 1 / (RRF_K + rank + 1) });
    });
    semantic.forEach((hit, rank) => {
      const entry = fused.get(hit.file.path) ?? { score: 0 };
      entry.sem = hit;
      entry.score += 1 / (RRF_K + rank + 1);
      fused.set(hit.file.path, entry);
    });
    return [...fused.values()]
      .sort((a, b) => b.score - a.score)
      .slice(0, topK)
      .map(({ lex, sem, score }) => {
        // Prefer the TF-IDF hit as the primary record (it carries the excerpt).
        const primary = lex ?? sem!;
        return {
          file: primary.file,
          score,
          excerpt: lex?.excerpt ?? "",
          title: primary.title,
          linked: lex?.linked ?? sem?.linked,
        };
      });
  }
}

93
src/RelatedNotesView.ts Normal file
View File

@@ -0,0 +1,93 @@
import { ItemView, TFile, WorkspaceLeaf } from "obsidian";
import type MemexChatPlugin from "./main";
export const VIEW_TYPE_RELATED = "memex-related-notes";
export class RelatedNotesView extends ItemView {
  private plugin: MemexChatPlugin;
  /** Debounce handle for refresh(); cancelled on every reschedule and on close. */
  private refreshTimer: ReturnType<typeof setTimeout> | null = null;

  constructor(leaf: WorkspaceLeaf, plugin: MemexChatPlugin) {
    super(leaf);
    this.plugin = plugin;
  }

  getViewType() { return VIEW_TYPE_RELATED; }
  getDisplayText() { return "Verwandte Notizen"; }
  getIcon() { return "sparkles"; }

  async onOpen(): Promise<void> {
    // Re-render whenever the user focuses a different note.
    this.registerEvent(this.app.workspace.on("active-leaf-change", () => this.scheduleRefresh()));
    this.registerEvent(this.app.workspace.on("file-open", () => this.scheduleRefresh()));
    this.render([]);
    this.scheduleRefresh();
  }

  async onClose(): Promise<void> {
    // Fix: cancel a pending debounced refresh so it cannot fire after the view is closed.
    if (this.refreshTimer) {
      clearTimeout(this.refreshTimer);
      this.refreshTimer = null;
    }
  }

  /** Debounced refresh — collapses bursts of leaf/file events into one search. */
  private scheduleRefresh(delay = 400) {
    if (this.refreshTimer) clearTimeout(this.refreshTimer);
    this.refreshTimer = setTimeout(() => this.refresh(), delay);
  }

  /** Called by the plugin when the embedding index finishes building. */
  onIndexReady() { this.scheduleRefresh(0); }

  /** Look up semantically similar notes for the active markdown file and render them. */
  private async refresh() {
    const file = this.app.workspace.getActiveFile();
    if (!file || file.extension !== "md") return; // keep previous content for non-md files
    const es = this.plugin.embedSearch;
    if (!es || !es.isIndexed()) {
      this.renderStatus("Embedding-Index wird aufgebaut…");
      return;
    }
    this.renderStatus("Suche verwandte Notizen…");
    const results = await es.searchSimilarToFile(file);
    this.render(results, file.basename);
  }

  /** Replace the panel content with a single status line. */
  private renderStatus(msg: string) {
    this.contentEl.empty();
    this.contentEl.createDiv({ cls: "vc-related-status", text: msg });
  }

  /**
   * Render the result list.
   * Fix: the result element type now includes the optional `linked` flag the body
   * reads — previously `r.linked` was absent from the declared type (strict-TS error).
   *
   * @param results similar notes, ordered by descending score (0..1)
   * @param forNote basename of the note the results belong to (shown as subtitle)
   */
  private render(
    results: Array<{ file: TFile; score: number; title: string; linked?: boolean }>,
    forNote?: string
  ) {
    this.contentEl.empty();
    const header = this.contentEl.createDiv("vc-related-header");
    header.createDiv({ cls: "vc-related-title", text: "Verwandte Notizen" });
    if (forNote) header.createDiv({ cls: "vc-related-subtitle", text: forNote });
    if (!results.length) {
      this.contentEl.createDiv({ cls: "vc-related-status", text: forNote ? "Keine Treffer." : "" });
      return;
    }
    const list = this.contentEl.createDiv("vc-related-list");
    for (const r of results) {
      const item = list.createDiv("vc-related-item");
      const info = item.createDiv("vc-related-info");
      const nameRow = info.createDiv("vc-related-name-row");
      nameRow.createSpan({ cls: "vc-related-name", text: r.title });
      if (r.linked) nameRow.createSpan({ cls: "vc-related-linked", text: "verknüpft" });
      // Folder path (dimmed)
      const folder = r.file.parent?.path;
      if (folder && folder !== "/") {
        info.createDiv({ cls: "vc-related-folder", text: folder });
      }
      // Similarity bar + percentage
      const scoreWrap = item.createDiv("vc-related-score-wrap");
      const pct = Math.round(r.score * 100);
      const bar = scoreWrap.createDiv("vc-related-bar");
      bar.createDiv({ cls: "vc-related-bar-fill" }).style.width = `${pct}%`;
      scoreWrap.createDiv({ cls: "vc-related-pct", text: `${pct}%` });
      item.addEventListener("click", () => {
        this.app.workspace.openLinkText(r.file.path, r.file.path, false);
      });
    }
  }
}

608
src/SettingsTab.ts Normal file
View File

@@ -0,0 +1,608 @@
import { App, ButtonComponent, DropdownComponent, Notice, PluginSettingTab, Setting } from "obsidian";
import type MemexChatPlugin from "./main";
import { EMBEDDING_MODELS } from "./EmbedSearch";
/** One user-configurable prompt-extension button shown in the chat toolbar. */
export interface PromptButton {
  label: string; // button caption
  filePath: string; // vault-relative path to the system-prompt note (without .md)
  searchMode?: "date"; // if set: load notes by date range instead of TF-IDF
  searchFolders?: string[]; // folders to restrict date search (empty = all vault)
  helpText?: string; // shown as info panel + changes placeholder when button is active
}
/** Persisted plugin settings (saved/restored via Obsidian's plugin data). */
export interface MemexChatSettings {
  apiKey: string; // Anthropic API key (sk-ant-…)
  model: string; // Claude model ID
  maxTokens: number; // response token cap
  maxContextNotes: number; // how many notes auto-retrieval may attach
  maxCharsPerNote: number; // truncation limit per attached note
  systemPrompt: string; // base system prompt sent with every request
  autoRetrieveContext: boolean; // automatically attach relevant notes to each message
  showContextPreview: boolean; // show the retrieved context above the answer
  saveThreadsToVault: boolean; // persist chat threads as notes
  threadsFolder: string; // vault folder for saved threads
  sendOnEnter: boolean; // true: Enter sends; false: Cmd+Enter sends
  contextProperties: string[];
  promptButtons: PromptButton[];
  systemContextFile: string; // optional vault path for extended system context
  useEmbeddings: boolean; // use local embedding model instead of TF-IDF
  embeddingModel: string; // HuggingFace model ID
  embedExcludeFolders: string[]; // vault folders to skip during embedding
}
/** Factory defaults merged under the user's saved settings on load. */
export const DEFAULT_SETTINGS: MemexChatSettings = {
  apiKey: "",
  model: "claude-opus-4-6",
  maxTokens: 8192,
  maxContextNotes: 6,
  maxCharsPerNote: 2500,
  // Default system prompt (German — matches the plugin's target audience).
  systemPrompt: `Du bist ein hilfreicher Assistent mit Zugriff auf die persönliche Wissensdatenbank des Nutzers (Obsidian Vault).
Wenn du Fragen beantwortest:
- Nutze die bereitgestellten Notizen als primäre Wissensquelle
- Verweise auf relevante Notizen mit [[doppelten eckigen Klammern]]
- Antworte auf Deutsch, wenn die Frage auf Deutsch gestellt wird
- Wenn der Kontext unzureichend ist, sage das ehrlich und gib an, was noch fehlen könnte
- Verknüpfe Konzepte aus verschiedenen Notizen kreativ miteinander`,
  autoRetrieveContext: true,
  showContextPreview: true,
  saveThreadsToVault: true,
  threadsFolder: "Calendar/Chat",
  sendOnEnter: false,
  contextProperties: ["collection", "related", "up", "tags"],
  systemContextFile: "",
  useEmbeddings: false,
  embeddingModel: "TaylorAI/bge-micro-v2",
  embedExcludeFolders: [],
  // NOTE(review): these defaults reference author-specific vault paths — confirm they
  // degrade gracefully for users whose vault lacks these prompt notes.
  promptButtons: [
    {
      label: "Draft Check",
      filePath: "Schreibdenken/ferals/Code/Prompts/COHERENCE CHECK",
      helpText: "📝 DRAFT — Frühphase: Kernbotschaft, Kohärenz, grobe Struktur\n✂ PRE-PUBLISH — Fast fertig: Feinschliff, Sprache, Logik\n🔍 DIAGNOSTIC — Gezielte Analyse: ein spezifisches Problem benennen\n\nGib die Phase an und füge deinen Text mit @[[Notiz]] ein.",
    },
    {
      label: "Monthly Check",
      filePath: "Schreibdenken/ferals/Code/Prompts/MONTHLY COHERENCE AUDIT",
      searchMode: "date",
      searchFolders: ["Schreibdenken/ferals/Content/Artikel"],
    },
  ],
};
/** Static fallback model list for the settings dropdown (refreshable via the Models API). */
export const MODELS = [
  { id: "claude-opus-4-6", name: "Claude Opus 4.6 (Stärkste)" },
  { id: "claude-sonnet-4-6", name: "Claude Sonnet 4.6 (Empfohlen)" },
  { id: "claude-haiku-4-5-20251001", name: "Claude Haiku 4.5 (Schnell)" },
];
export class MemexChatSettingsTab extends PluginSettingTab {
plugin: MemexChatPlugin;
constructor(app: App, plugin: MemexChatPlugin) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
// Sorted vault folder list — used by all folder autocompletes in this settings page
const allFolders = this.app.vault.getAllFolders()
.map((f) => f.path)
.filter((p) => p !== "/")
.sort();
/** Attaches a folder-search dropdown to a wrapper element. onPick is called with the selected folder. */
const attachFolderDropdown = (
wrap: HTMLElement,
input: HTMLInputElement,
getExcluded: () => string[],
onPick: (folder: string) => void,
) => {
const dropdown = wrap.createDiv("vc-folder-dropdown");
dropdown.style.display = "none";
const refresh = () => {
const q = input.value.toLowerCase();
const excluded = getExcluded();
const matches = allFolders
.filter((f) => f.toLowerCase().includes(q) && !excluded.includes(f))
.slice(0, 12);
dropdown.empty();
if (!matches.length) { dropdown.style.display = "none"; return; }
for (const f of matches) {
const item = dropdown.createDiv("vc-folder-item");
item.textContent = f;
item.addEventListener("mousedown", (e) => { e.preventDefault(); onPick(f); });
}
dropdown.style.display = "block";
};
input.addEventListener("input", refresh);
input.addEventListener("focus", refresh);
input.addEventListener("blur", () => setTimeout(() => { dropdown.style.display = "none"; }, 150));
input.addEventListener("keydown", (e) => {
if (e.key === "Escape") { dropdown.style.display = "none"; input.blur(); }
});
};
containerEl.createEl("h2", { text: "Memex Chat Einstellungen" });
containerEl.createEl("p", {
text: `Memex Chat v${this.plugin.manifest.version}`,
cls: "setting-item-description",
});
// --- API ---
containerEl.createEl("h3", { text: "Claude API" });
new Setting(containerEl)
.setName("API Key")
.setDesc("Dein Anthropic API Key (sk-ant-...)")
.addText((text) =>
text
.setPlaceholder("sk-ant-api03-...")
.setValue(this.plugin.settings.apiKey)
.onChange(async (value) => {
this.plugin.settings.apiKey = value.trim();
await this.plugin.saveSettings();
})
);
let modelDrop: DropdownComponent;
let refreshBtn: ButtonComponent;
new Setting(containerEl)
.setName("Modell")
.setDesc("Welches Claude-Modell verwenden? (Aktualisieren zeigt Roh-IDs)")
.addDropdown((drop) => {
modelDrop = drop;
for (const m of MODELS) drop.addOption(m.id, m.name);
drop.setValue(this.plugin.settings.model).onChange(async (value) => {
this.plugin.settings.model = value;
await this.plugin.saveSettings();
});
})
.addButton((btn) => {
refreshBtn = btn;
btn.setButtonText("Aktualisieren").onClick(async () => {
const prev = modelDrop.getValue();
refreshBtn.setDisabled(true);
refreshBtn.setButtonText("...");
try {
const models = await this.plugin.claude.fetchModels(this.plugin.settings.apiKey);
modelDrop.selectEl.empty();
for (const m of models) modelDrop.addOption(m.id, m.name);
modelDrop.setValue(prev);
this.plugin.settings.model = modelDrop.getValue();
await this.plugin.saveSettings();
} catch (err) {
new Notice("Modelle konnten nicht geladen werden: " + (err as Error).message);
} finally {
refreshBtn.setDisabled(false);
refreshBtn.setButtonText("Aktualisieren");
}
});
});
new Setting(containerEl)
.setName("Max. Antwort-Tokens")
.setDesc("Maximale Länge der Claude-Antwort. Für lange Analysen (z.B. Monthly Check) höher einstellen. (102416000)")
.addSlider((slider) =>
slider
.setLimits(1024, 16000, 512)
.setValue(this.plugin.settings.maxTokens)
.setDynamicTooltip()
.onChange(async (value) => {
this.plugin.settings.maxTokens = value;
await this.plugin.saveSettings();
})
);
new Setting(containerEl)
.setName("Senden mit Enter")
.setDesc("Ein: Enter sendet. Aus: Cmd+Enter sendet (Enter = neue Zeile)")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.sendOnEnter).onChange(async (value) => {
this.plugin.settings.sendOnEnter = value;
await this.plugin.saveSettings();
})
);
// --- Semantic Search ---
containerEl.createEl("h3", { text: "Semantische Suche (Embeddings)" });
containerEl.createEl("p", {
text: "Ersetzt TF-IDF durch ein lokales KI-Modell (Transformers.js). Das Modell wird beim ersten Einsatz von HuggingFace heruntergeladen und dann lokal gecacht. WASM-Laufzeit wird einmalig vom CDN geladen.",
cls: "setting-item-description",
});
new Setting(containerEl)
.setName("Semantische Suche aktivieren")
.setDesc("Nutzt lokale Embeddings für kontextbasierte Ähnlichkeitssuche statt TF-IDF")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.useEmbeddings).onChange(async (value) => {
this.plugin.settings.useEmbeddings = value;
await this.plugin.saveSettings();
await this.plugin.initEmbedSearch();
})
);
new Setting(containerEl)
.setName("Embedding-Modell")
.setDesc("Welches Modell für die semantische Suche verwenden? Kleiner = schneller, größer = besser.")
.addDropdown((drop) => {
for (const m of EMBEDDING_MODELS) drop.addOption(m.id, m.name);
drop.setValue(this.plugin.settings.embeddingModel).onChange(async (value) => {
this.plugin.settings.embeddingModel = value;
await this.plugin.saveSettings();
await this.plugin.initEmbedSearch();
});
});
// Exclude folders from embedding
const exclSetting = new Setting(containerEl)
.setName("Ordner ausschließen")
.setDesc("Diese Ordner werden beim Embedding übersprungen. Nach Änderung Index neu aufbauen.");
exclSetting.settingEl.style.flexWrap = "wrap";
exclSetting.settingEl.style.alignItems = "flex-start";
const exclTagContainer = exclSetting.controlEl.createDiv("vc-prop-tags");
const renderExclTags = () => {
exclTagContainer.empty();
for (const folder of this.plugin.settings.embedExcludeFolders) {
const tag = exclTagContainer.createEl("span", { cls: "vc-prop-tag" });
tag.createEl("span", { text: folder });
const x = tag.createEl("button", { cls: "vc-prop-tag-remove", text: "×" });
x.onclick = async () => {
this.plugin.settings.embedExcludeFolders =
this.plugin.settings.embedExcludeFolders.filter((f) => f !== folder);
await this.plugin.saveSettings();
renderExclTags();
};
}
};
renderExclTags();
const exclWrap = exclSetting.controlEl.createDiv("vc-folder-search-wrap");
const exclInput = exclWrap.createEl("input", {
cls: "vc-prop-input",
attr: { type: "text", placeholder: "Ordner suchen…" },
}) as HTMLInputElement;
const addExclFolder = async (folder: string) => {
folder = folder.trim().replace(/\/$/, "");
if (!folder || this.plugin.settings.embedExcludeFolders.includes(folder)) return;
this.plugin.settings.embedExcludeFolders = [...this.plugin.settings.embedExcludeFolders, folder];
await this.plugin.saveSettings();
exclInput.value = "";
renderExclTags();
};
attachFolderDropdown(exclWrap, exclInput,
() => this.plugin.settings.embedExcludeFolders,
(f) => addExclFolder(f),
);
exclInput.addEventListener("keydown", (e) => {
if (e.key === "Enter") { e.preventDefault(); addExclFolder(exclInput.value); }
});
// --- Context ---
containerEl.createEl("h3", { text: "Kontext-Einstellungen" });
new Setting(containerEl)
.setName("Max. Kontext-Notizen")
.setDesc("Wie viele Notizen werden automatisch als Kontext hinzugefügt? (115)")
.addSlider((slider) =>
slider
.setLimits(1, 15, 1)
.setValue(this.plugin.settings.maxContextNotes)
.setDynamicTooltip()
.onChange(async (value) => {
this.plugin.settings.maxContextNotes = value;
await this.plugin.saveSettings();
})
);
new Setting(containerEl)
.setName("Max. Zeichen pro Notiz")
.setDesc("Wie viele Zeichen einer Notiz in den Kontext einbezogen werden (10008000)")
.addSlider((slider) =>
slider
.setLimits(1000, 8000, 500)
.setValue(this.plugin.settings.maxCharsPerNote)
.setDynamicTooltip()
.onChange(async (value) => {
this.plugin.settings.maxCharsPerNote = value;
await this.plugin.saveSettings();
})
);
new Setting(containerEl)
.setName("Automatischer Kontext-Abruf")
.setDesc("Beim Senden automatisch relevante Notizen suchen und einbinden")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.autoRetrieveContext).onChange(async (value) => {
this.plugin.settings.autoRetrieveContext = value;
await this.plugin.saveSettings();
})
);
new Setting(containerEl)
.setName("Kontext-Vorschau anzeigen")
.setDesc("Vor dem Senden zeigen, welche Notizen als Kontext verwendet werden")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.showContextPreview).onChange(async (value) => {
this.plugin.settings.showContextPreview = value;
await this.plugin.saveSettings();
})
);
// --- Priority Properties ---
containerEl.createEl("h3", { text: "Prioritäts-Properties" });
containerEl.createEl("p", {
text: "Frontmatter-Properties, deren Werte bei der Kontextsuche stärker gewichtet werden (z.B. related, collection, up, tags). Nach Änderung den Index neu aufbauen.",
cls: "setting-item-description",
});
const propSetting = new Setting(containerEl).setName("Properties");
propSetting.settingEl.style.flexWrap = "wrap";
propSetting.settingEl.style.alignItems = "flex-start";
// Tag container
const tagContainer = propSetting.controlEl.createDiv("vc-prop-tags");
const renderTags = () => {
tagContainer.empty();
for (const prop of this.plugin.settings.contextProperties) {
const tag = tagContainer.createEl("span", { cls: "vc-prop-tag" });
tag.createEl("span", { text: prop });
const removeBtn = tag.createEl("button", { cls: "vc-prop-tag-remove", text: "×" });
removeBtn.onclick = async () => {
this.plugin.settings.contextProperties = this.plugin.settings.contextProperties.filter(
(p) => p !== prop
);
await this.plugin.saveSettings();
renderTags();
};
}
};
renderTags();
// Add input row
const addRow = propSetting.controlEl.createDiv("vc-prop-add-row");
const addInput = addRow.createEl("input", {
cls: "vc-prop-input",
attr: { type: "text", placeholder: "Property hinzufügen…" },
}) as HTMLInputElement;
const addBtn = addRow.createEl("button", { cls: "vc-prop-add-btn", text: "+" });
const doAdd = async () => {
const val = addInput.value.trim().toLowerCase();
if (!val || this.plugin.settings.contextProperties.includes(val)) return;
this.plugin.settings.contextProperties = [...this.plugin.settings.contextProperties, val];
await this.plugin.saveSettings();
addInput.value = "";
renderTags();
};
addBtn.onclick = doAdd;
addInput.addEventListener("keydown", (e) => {
if (e.key === "Enter") { e.preventDefault(); doAdd(); }
});
// --- Prompt Buttons ---
containerEl.createEl("h3", { text: "Prompt-Buttons" });
containerEl.createEl("p", {
text: "Buttons in der Chat-Leiste, die den System-Prompt um den Inhalt einer Vault-Notiz erweitern.",
cls: "setting-item-description",
});
const btnListEl = containerEl.createDiv("vc-pbtn-list");
const renderBtnList = () => {
btnListEl.empty();
for (const [idx, pb] of this.plugin.settings.promptButtons.entries()) {
const card = btnListEl.createDiv("vc-pbtn-card");
// ── Row 1: label / path / remove ──
const row1 = card.createDiv("vc-pbtn-row");
const labelInput = row1.createEl("input", {
cls: "vc-pbtn-input",
attr: { type: "text", placeholder: "Label", value: pb.label },
}) as HTMLInputElement;
labelInput.addEventListener("change", async () => {
this.plugin.settings.promptButtons[idx].label = labelInput.value.trim();
await this.plugin.saveSettings();
});
const pathInput = row1.createEl("input", {
cls: "vc-pbtn-input vc-pbtn-path",
attr: { type: "text", placeholder: "Pfad im Vault (ohne .md)", value: pb.filePath },
}) as HTMLInputElement;
pathInput.addEventListener("change", async () => {
this.plugin.settings.promptButtons[idx].filePath = pathInput.value.trim();
await this.plugin.saveSettings();
});
const removeBtn = row1.createEl("button", { cls: "vc-prop-tag-remove", text: "×" });
removeBtn.style.fontSize = "16px";
removeBtn.onclick = async () => {
this.plugin.settings.promptButtons.splice(idx, 1);
await this.plugin.saveSettings();
renderBtnList();
};
// ── Row 2: date-search toggle + folders ──
const row2 = card.createDiv("vc-pbtn-row2");
const toggleWrap = row2.createEl("label", { cls: "vc-pbtn-toggle-wrap" });
const checkbox = toggleWrap.createEl("input", { attr: { type: "checkbox" } }) as HTMLInputElement;
checkbox.checked = pb.searchMode === "date";
toggleWrap.appendText(" Datumsbasierte Suche");
const folderSection = row2.createDiv("vc-pbtn-folders");
folderSection.style.display = pb.searchMode === "date" ? "flex" : "none";
const renderFolders = () => {
folderSection.empty();
folderSection.createEl("span", { text: "Ordner: ", cls: "vc-pbtn-folder-label" });
for (const folder of (pb.searchFolders ?? [])) {
const chip = folderSection.createEl("span", { cls: "vc-prop-tag" });
chip.createEl("span", { text: folder });
const x = chip.createEl("button", { cls: "vc-prop-tag-remove", text: "×" });
x.onclick = async () => {
pb.searchFolders = (pb.searchFolders ?? []).filter((f) => f !== folder);
await this.plugin.saveSettings();
renderFolders();
};
}
const folderWrap = folderSection.createDiv("vc-folder-search-wrap");
folderWrap.style.width = "200px";
const folderInput = folderWrap.createEl("input", {
cls: "vc-pbtn-input",
attr: { type: "text", placeholder: "Ordner suchen…" },
}) as HTMLInputElement;
const doAddFolder = async (val: string) => {
val = val.trim().replace(/\/$/, "");
if (!val || (pb.searchFolders ?? []).includes(val)) return;
pb.searchFolders = [...(pb.searchFolders ?? []), val];
await this.plugin.saveSettings();
renderFolders();
};
attachFolderDropdown(folderWrap, folderInput, () => pb.searchFolders ?? [], (f) => doAddFolder(f));
folderInput.addEventListener("keydown", (e) => {
if (e.key === "Enter") { e.preventDefault(); doAddFolder(folderInput.value); }
});
};
renderFolders();
checkbox.addEventListener("change", async () => {
pb.searchMode = checkbox.checked ? "date" : undefined;
if (!checkbox.checked) pb.searchFolders = [];
folderSection.style.display = checkbox.checked ? "flex" : "none";
await this.plugin.saveSettings();
});
// ── Row 3: help text ──
const helpLabel = card.createEl("label", { cls: "vc-pbtn-folder-label", text: "Hilfetext (optional, erscheint im Chat wenn Button aktiv):" });
const helpTextArea = card.createEl("textarea", {
cls: "vc-pbtn-help-textarea",
attr: { placeholder: "z.B. DRAFT — Frühphase…\nPRE-PUBLISH — Fast fertig…" },
}) as HTMLTextAreaElement;
helpTextArea.value = pb.helpText ?? "";
// 1 row when empty, auto-fit to content when filled
const updateHelpRows = () => {
const lines = helpTextArea.value.split("\n").length;
helpTextArea.rows = helpTextArea.value.trim() ? Math.max(2, lines) : 1;
};
updateHelpRows();
helpTextArea.addEventListener("input", updateHelpRows);
helpTextArea.addEventListener("change", async () => {
pb.helpText = helpTextArea.value.trim() || undefined;
await this.plugin.saveSettings();
});
}
// ── Add row ──
const addRow = btnListEl.createDiv("vc-pbtn-add-row");
const newLabel = addRow.createEl("input", {
cls: "vc-pbtn-input",
attr: { type: "text", placeholder: "Label (z.B. Draft Check)" },
}) as HTMLInputElement;
const newPath = addRow.createEl("input", {
cls: "vc-pbtn-input vc-pbtn-path",
attr: { type: "text", placeholder: "Pfad/zur/Prompt-Notiz" },
}) as HTMLInputElement;
const addBtn = addRow.createEl("button", { cls: "vc-prop-add-btn", text: "+" });
addBtn.onclick = async () => {
const label = newLabel.value.trim();
const filePath = newPath.value.trim();
if (!label || !filePath) return;
this.plugin.settings.promptButtons.push({ label, filePath });
await this.plugin.saveSettings();
renderBtnList();
};
};
renderBtnList();
// --- Threads ---
containerEl.createEl("h3", { text: "Thread-History" });
new Setting(containerEl)
.setName("Threads im Vault speichern")
.setDesc("Chat-Threads als Markdown-Notizen im Vault ablegen")
.addToggle((toggle) =>
toggle.setValue(this.plugin.settings.saveThreadsToVault).onChange(async (value) => {
this.plugin.settings.saveThreadsToVault = value;
await this.plugin.saveSettings();
})
);
const threadsFolderSetting = new Setting(containerEl)
.setName("Threads-Ordner")
.setDesc("Pfad im Vault, wo Chat-Threads gespeichert werden");
const tfWrap = threadsFolderSetting.controlEl.createDiv("vc-folder-search-wrap");
const tfInput = tfWrap.createEl("input", {
cls: "vc-prop-input",
attr: { type: "text", placeholder: "Calendar/Chat" },
}) as HTMLInputElement;
tfInput.value = this.plugin.settings.threadsFolder;
tfInput.addEventListener("input", async () => {
this.plugin.settings.threadsFolder = tfInput.value;
await this.plugin.saveSettings();
});
attachFolderDropdown(tfWrap, tfInput, () => [], async (f) => {
tfInput.value = f;
this.plugin.settings.threadsFolder = f;
await this.plugin.saveSettings();
});
// --- System Prompt ---
containerEl.createEl("h3", { text: "System Prompt" });
new Setting(containerEl)
.setName("System Prompt")
.setDesc("Instruktionen für Claude (wie soll er sich verhalten?)")
.addTextArea((textarea) => {
textarea
.setValue(this.plugin.settings.systemPrompt)
.onChange(async (value) => {
this.plugin.settings.systemPrompt = value;
await this.plugin.saveSettings();
});
textarea.inputEl.rows = 8;
textarea.inputEl.style.width = "100%";
textarea.inputEl.style.fontFamily = "monospace";
textarea.inputEl.style.fontSize = "12px";
});
new Setting(containerEl)
.setName("System Context (Datei)")
.setDesc("Optionale Vault-Notiz, deren Inhalt an den System Prompt angehängt wird (Pfad ohne .md)")
.addText((text) =>
text
.setPlaceholder("z.B. Prompts/Mein System Context")
.setValue(this.plugin.settings.systemContextFile)
.onChange(async (value) => {
this.plugin.settings.systemContextFile = value.trim();
await this.plugin.saveSettings();
})
);
// --- Actions ---
containerEl.createEl("h3", { text: "Aktionen" });
new Setting(containerEl)
.setName("Index neu aufbauen")
.setDesc("Vault-Index neu aufbauen (TF-IDF oder Embedding-Index, je nach Einstellung)")
.addButton((btn) =>
btn
.setButtonText("Index neu aufbauen")
.setCta()
.onClick(async () => {
btn.setButtonText("Indiziere…");
btn.setDisabled(true);
await this.plugin.rebuildIndex();
btn.setButtonText("✓ Fertig!");
setTimeout(() => {
btn.setButtonText("Index neu aufbauen");
btn.setDisabled(false);
}, 2000);
})
);
}
}

263
src/VaultSearch.ts Normal file
View File

@@ -0,0 +1,263 @@
import { App, TFile } from "obsidian";
/** One hit returned by VaultSearch / EmbedSearch ranking. */
export interface SearchResult {
  /** The matching vault note. */
  file: TFile;
  /** Relevance score; cosine similarity for TF-IDF search (higher = more relevant). */
  score: number;
  /** Short content snippet around the best query match (may be empty, e.g. for name-only matches). */
  excerpt: string;
  /** Display title — the note's basename. */
  title: string;
  /** True when the note is explicitly linked via a contextProperty frontmatter field */
  linked?: boolean;
}
/**
 * Minimal TF-IDF search engine over the Obsidian vault.
 *
 * `buildIndex()` reads every markdown file, tokenizes it (with a boost for
 * configured frontmatter properties), and stores an L2-normalized TF-IDF
 * vector per note so that `search()` can rank by cosine similarity.
 */
export class VaultSearch {
  private app: App;
  /** path -> term -> L2-normalized TF-IDF weight */
  private docVectors: Map<string, Map<string, number>> = new Map();
  /** term -> inverse document frequency, shared by documents and queries */
  private idf: Map<string, number> = new Map();
  /** path -> cleaned plain-text content (reused for excerpts and context injection) */
  private docContents: Map<string, string> = new Map();
  private indexed = false;
  private indexing = false; // re-entrancy guard for buildIndex()
  /** Optional progress callback: invoked every 100 files and once at completion */
  onProgress?: (done: number, total: number) => void;
  /** Frontmatter properties whose values are boosted during indexing */
  priorityProperties: string[] = ["collection", "related", "up", "tags"];
  private readonly propertyBoost = 5; // tokens from priority properties count 5x

  constructor(app: App) {
    this.app = app;
  }

  /**
   * Largest value in an iterable, or `floor` when the iterable is empty or
   * all values are below it. Replaces `Math.max(...values)`, which throws a
   * RangeError once the argument count exceeds the engine's call-argument
   * limit — possible for very large notes with huge vocabularies.
   */
  private maxValue(values: Iterable<number>, floor: number): number {
    let max = floor;
    for (const v of values) if (v > max) max = v;
    return max;
  }

  /** Tokenize text: lowercase, split on non-word chars, keep umlauts */
  private tokenize(text: string): string[] {
    return text
      .toLowerCase()
      .replace(/[^\wäöüßÄÖÜ\s]/g, " ")
      .split(/\s+/)
      .filter((t) => t.length > 2);
  }

  /** Strip YAML frontmatter and Obsidian-specific markup */
  private cleanContent(raw: string): string {
    let content = raw;
    // Remove frontmatter
    if (content.startsWith("---")) {
      const end = content.indexOf("\n---", 3);
      if (end > 0) content = content.slice(end + 4);
    }
    // Unwrap wikilinks [[target|alias]] → alias or target
    content = content.replace(/\[\[([^\]|]+)(?:\|([^\]]+))?\]\]/g, (_, target, alias) => alias || target);
    // Remove markdown images/links
    content = content.replace(/!\[.*?\]\(.*?\)/g, "");
    content = content.replace(/\[([^\]]+)\]\(.*?\)/g, "$1");
    // Remove callout syntax
    content = content.replace(/>\s*\[!\w+\][+-]?\s*/g, "");
    // Remove headers formatting (keep text)
    content = content.replace(/^#{1,6}\s+/gm, "");
    return content;
  }

  /**
   * Build or rebuild the TF-IDF index.
   * Safe to call repeatedly; concurrent calls are ignored via `indexing`.
   */
  async buildIndex(): Promise<void> {
    if (this.indexing) return;
    this.indexing = true;
    this.indexed = false;
    this.docVectors.clear();
    this.idf.clear();
    this.docContents.clear();
    try {
      const files = this.app.vault.getMarkdownFiles();
      const total = files.length;
      const df: Map<string, number> = new Map(); // term -> doc count
      // Step 1: Read all files, compute TF
      const tfs: Map<string, Map<string, number>> = new Map();
      for (let i = 0; i < files.length; i++) {
        const file = files[i];
        if (this.onProgress && i % 100 === 0) this.onProgress(i, total);
        try {
          const raw = await this.app.vault.cachedRead(file);
          const clean = this.cleanContent(raw);
          this.docContents.set(file.path, clean);
          // Include the basename so a note matches searches for its own title
          const tokens = this.tokenize(clean + " " + file.basename);
          const tf: Map<string, number> = new Map();
          for (const t of tokens) {
            tf.set(t, (tf.get(t) ?? 0) + 1);
          }
          // Boost tokens from priority frontmatter properties
          const fm = this.app.metadataCache.getFileCache(file)?.frontmatter ?? {};
          for (const prop of this.priorityProperties) {
            const val = fm[prop];
            if (!val) continue;
            const text = Array.isArray(val) ? val.join(" ") : String(val);
            for (const t of this.tokenize(text)) {
              tf.set(t, (tf.get(t) ?? 0) + this.propertyBoost);
            }
          }
          // Max-TF normalization (loop-based max avoids spread RangeError)
          const maxTf = this.maxValue(tf.values(), 1);
          const normalizedTf: Map<string, number> = new Map();
          for (const [t, count] of tf) {
            normalizedTf.set(t, count / maxTf);
          }
          tfs.set(file.path, normalizedTf);
          // Update DF
          for (const t of tf.keys()) {
            df.set(t, (df.get(t) ?? 0) + 1);
          }
        } catch {
          // skip unreadable files
        }
      }
      // Step 2: Compute IDF and TF-IDF vectors
      const N = files.length;
      for (const [term, docCount] of df) {
        this.idf.set(term, Math.log(N / docCount + 1));
      }
      for (const [path, tf] of tfs) {
        const vec: Map<string, number> = new Map();
        let norm = 0;
        for (const [term, tfVal] of tf) {
          const idfVal = this.idf.get(term) ?? 0;
          const tfidf = tfVal * idfVal;
          vec.set(term, tfidf);
          norm += tfidf * tfidf;
        }
        // L2 normalize so plain dot products become cosine similarities
        norm = Math.sqrt(norm);
        if (norm > 0) {
          for (const [term, val] of vec) {
            vec.set(term, val / norm);
          }
        }
        this.docVectors.set(path, vec);
      }
      this.indexed = true;
      if (this.onProgress) this.onProgress(total, total);
    } finally {
      // Always reset indexing so retries are possible if an error occurred
      this.indexing = false;
    }
  }

  isIndexed(): boolean {
    return this.indexed;
  }

  /** Find notes with similar names (no index required). Uses substring + word-overlap scoring. */
  findSimilarByName(query: string, topK = 2, minScore = 0.45): SearchResult[] {
    const normalize = (s: string) =>
      s.toLowerCase().replace(/[^\wäöüß\s]/gi, " ").trim();
    const words = (s: string) => new Set(s.split(/\s+/).filter((w) => w.length > 1));
    const q = normalize(query);
    // Guard: an empty normalized query would substring-match every note
    // (`name.includes("")` is always true), scoring the whole vault 0.9.
    if (!q) return [];
    const qWords = words(q);
    const scored: Array<[TFile, number]> = [];
    for (const file of this.app.vault.getMarkdownFiles()) {
      const name = normalize(file.basename);
      const nameWords = words(name);
      let score = 0;
      // Substring containment
      if (name.includes(q) || q.includes(name)) score = 0.9;
      // Jaccard word overlap
      const intersection = [...qWords].filter((w) => nameWords.has(w)).length;
      const union = new Set([...qWords, ...nameWords]).size;
      if (union > 0) score = Math.max(score, intersection / union);
      if (score >= minScore) scored.push([file, score]);
    }
    scored.sort((a, b) => b[1] - a[1]);
    return scored.slice(0, topK).map(([file, score]) => ({
      file,
      score,
      excerpt: "",
      title: file.basename,
    }));
  }

  /** Search for the top-K most similar notes to the query (cosine over TF-IDF). */
  async search(query: string, topK = 8): Promise<SearchResult[]> {
    if (!this.indexed) await this.buildIndex();
    const tokens = this.tokenize(query);
    // Build query TF vector
    const qtf: Map<string, number> = new Map();
    for (const t of tokens) qtf.set(t, (qtf.get(t) ?? 0) + 1);
    const qMax = this.maxValue(qtf.values(), 1);
    // Query TF-IDF normalized
    const qvec: Map<string, number> = new Map();
    let qnorm = 0;
    for (const [t, count] of qtf) {
      const tfidf = (count / qMax) * (this.idf.get(t) ?? 0);
      qvec.set(t, tfidf);
      qnorm += tfidf * tfidf;
    }
    qnorm = Math.sqrt(qnorm);
    if (qnorm > 0) for (const [t, v] of qvec) qvec.set(t, v / qnorm);
    // Score all documents
    const scores: Array<[string, number]> = [];
    for (const [path, vec] of this.docVectors) {
      let score = 0;
      for (const [t, qv] of qvec) {
        const dv = vec.get(t) ?? 0;
        score += qv * dv;
      }
      if (score > 0.01) scores.push([path, score]);
    }
    scores.sort((a, b) => b[1] - a[1]);
    const top = scores.slice(0, topK);
    const files = this.app.vault.getMarkdownFiles();
    const fileMap = new Map<string, TFile>(files.map((f) => [f.path, f]));
    return top
      .map(([path, score]) => {
        const file = fileMap.get(path);
        if (!file) return null; // note deleted since indexing
        const content = this.docContents.get(path) ?? "";
        const excerpt = this.buildExcerpt(content, query, 300);
        return { file, score, excerpt, title: file.basename };
      })
      .filter(Boolean) as SearchResult[];
  }

  /** Get note content for context injection */
  async getContent(file: TFile, maxChars = 3000): Promise<string> {
    try {
      const raw = await this.app.vault.cachedRead(file);
      return this.cleanContent(raw).slice(0, maxChars);
    } catch {
      return "";
    }
  }

  /** Pick the maxLen-wide window of `content` containing the most query words. */
  private buildExcerpt(content: string, query: string, maxLen: number): string {
    // Drop empty tokens (leading/trailing whitespace); `window.includes("")`
    // is always true and would uniformly inflate every window's score.
    const queryWords = query.toLowerCase().split(/\s+/).filter((w) => w.length > 0);
    const lower = content.toLowerCase();
    let bestPos = 0;
    let bestScore = 0;
    for (let i = 0; i < content.length - maxLen; i += 50) {
      const window = lower.slice(i, i + maxLen);
      const score = queryWords.filter((w) => window.includes(w)).length;
      if (score > bestScore) {
        bestScore = score;
        bestPos = i;
      }
    }
    let excerpt = content.slice(bestPos, bestPos + maxLen).trim();
    if (bestPos > 0) excerpt = "…" + excerpt;
    if (bestPos + maxLen < content.length) excerpt += "…";
    return excerpt;
  }
}

288
src/main.ts Normal file
View File

@@ -0,0 +1,288 @@
import { Notice, Plugin, TFile } from "obsidian";
import { ChatView, VIEW_TYPE_MEMEX_CHAT } from "./ChatView";
import { VaultSearch } from "./VaultSearch";
import { EmbedSearch } from "./EmbedSearch";
import { HybridSearch } from "./HybridSearch";
import { ClaudeClient } from "./ClaudeClient";
import { MemexChatSettingsTab, MemexChatSettings, DEFAULT_SETTINGS } from "./SettingsTab";
import { RelatedNotesView, VIEW_TYPE_RELATED } from "./RelatedNotesView";
/** Shape of the blob persisted via Plugin.saveData()/loadData(). */
interface PluginData {
  /** User-facing plugin settings (merged over DEFAULT_SETTINGS on load). */
  settings: MemexChatSettings;
  /** Saved chat threads; opaque here — ChatView owns the structure. */
  threads: unknown[];
}
export default class MemexChatPlugin extends Plugin {
  settings!: MemexChatSettings; // merged settings, assigned in onload()
  search!: VaultSearch; // TF-IDF engine, always available
  embedSearch: EmbedSearch | null = null; // semantic engine, only when embeddings enabled
  hybridSearch: HybridSearch | null = null; // set once the embedding index finished building
  claude!: ClaudeClient;
  data!: PluginData; // full persisted blob (settings + threads)

  /** Returns the active search engine: HybridSearch when embeddings are ready, else VaultSearch */
  get activeSearch(): VaultSearch | HybridSearch {
    return this.hybridSearch ?? this.search;
  }

  /**
   * Plugin entry point: loads persisted data, wires services, registers
   * views/commands/ribbons, and defers index building to layout-ready.
   */
  async onload(): Promise<void> {
    // Load data
    const loaded = (await this.loadData()) as PluginData | null;
    const mergedSettings: MemexChatSettings = { ...DEFAULT_SETTINGS, ...(loaded?.settings ?? {}) };
    // Merge promptButtons per-entry so new fields (e.g. helpText) from defaults aren't lost
    if (loaded?.settings?.promptButtons) {
      mergedSettings.promptButtons = loaded.settings.promptButtons.map((saved, i) => ({
        ...(DEFAULT_SETTINGS.promptButtons[i] ?? {}),
        ...saved,
      }));
    }
    this.data = {
      settings: mergedSettings,
      threads: loaded?.threads ?? [],
    };
    // `settings` aliases data.settings, so saveSettings() persists edits in place
    this.settings = this.data.settings;
    // Init services
    this.search = new VaultSearch(this.app);
    this.claude = new ClaudeClient();
    // Register views
    this.registerView(VIEW_TYPE_MEMEX_CHAT, (leaf) => new ChatView(leaf, this));
    this.registerView(VIEW_TYPE_RELATED, (leaf) => new RelatedNotesView(leaf, this));
    // Ribbon icons
    this.addRibbonIcon("message-circle", "Memex Chat öffnen", () => {
      this.activateView();
    });
    this.addRibbonIcon("sparkles", "Verwandte Notizen", () => {
      this.activateRelatedView();
    });
    // Commands
    this.addCommand({
      id: "open-memex-chat",
      name: "Memex Chat öffnen",
      callback: () => this.activateView(),
    });
    this.addCommand({
      id: "memex-related-notes",
      name: "Verwandte Notizen anzeigen",
      callback: () => this.activateRelatedView(),
    });
    this.addCommand({
      id: "memex-chat-rebuild-index",
      name: "Memex Chat: Index neu aufbauen",
      callback: () => this.rebuildIndex(),
    });
    // Opens the chat with the currently active note pre-wired as explicit context
    this.addCommand({
      id: "memex-chat-active-note",
      name: "Memex Chat: Aktive Notiz als Kontext",
      callback: () => {
        const file = this.app.workspace.getActiveFile();
        if (file) {
          this.activateView().then(() => {
            // Pre-fill with active note path
            const leaf = this.app.workspace.getLeavesOfType(VIEW_TYPE_MEMEX_CHAT)[0];
            if (leaf) {
              const view = leaf.view as ChatView;
              view.newThread();
              view.setInputValue(`Erkläre und verknüpfe [[${file.basename}]] mit anderen Konzepten im Vault.`);
              view.setExplicitContext([file]);
            }
          });
        }
      },
    });
    // Settings tab
    this.addSettingTab(new MemexChatSettingsTab(this.app, this));
    // Re-embed modified notes as they change (registered once; guard handles embedSearch being null)
    this.registerEvent(
      this.app.vault.on("modify", (file) => {
        if (this.embedSearch && file instanceof TFile && file.extension === "md")
          this.embedSearch.reembedFile(file);
      })
    );
    // Build index once the workspace layout (and vault cache) is fully ready
    this.app.workspace.onLayoutReady(() => {
      if (!this.search.isIndexed()) {
        this.search.priorityProperties = this.settings.contextProperties;
        this.search.buildIndex().catch(console.error);
      }
      if (this.settings.useEmbeddings) {
        this.initEmbedSearch().catch(console.error);
      }
    });
  }

  /** Closes all chat leaves when the plugin is disabled/unloaded. */
  onunload(): void {
    this.app.workspace.detachLeavesOfType(VIEW_TYPE_MEMEX_CHAT);
  }

  /** Reveals an existing chat leaf, or opens a new tab with the chat view. */
  async activateView(): Promise<void> {
    const existing = this.app.workspace.getLeavesOfType(VIEW_TYPE_MEMEX_CHAT);
    if (existing.length > 0) { this.app.workspace.revealLeaf(existing[0]); return; }
    const leaf = this.app.workspace.getLeaf("tab");
    if (!leaf) return;
    await leaf.setViewState({ type: VIEW_TYPE_MEMEX_CHAT, active: true });
    this.app.workspace.revealLeaf(leaf);
  }

  /** Reveals an existing related-notes leaf, or opens one in the right sidebar. */
  async activateRelatedView(): Promise<void> {
    const existing = this.app.workspace.getLeavesOfType(VIEW_TYPE_RELATED);
    if (existing.length > 0) { this.app.workspace.revealLeaf(existing[0]); return; }
    const leaf = this.app.workspace.getRightLeaf(false);
    if (!leaf) return;
    await leaf.setViewState({ type: VIEW_TYPE_RELATED, active: true });
    this.app.workspace.revealLeaf(leaf);
  }

  /** Tells every open RelatedNotesView that the (embedding) index is ready to query. */
  private notifyRelatedView() {
    this.app.workspace.getLeavesOfType(VIEW_TYPE_RELATED).forEach((l) => {
      if (l.view instanceof RelatedNotesView) l.view.onIndexReady();
    });
  }

  /** Create or recreate the EmbedSearch instance (called when settings change) */
  async initEmbedSearch(): Promise<void> {
    if (!this.settings.useEmbeddings) {
      // Feature turned off: drop both engines so activeSearch falls back to TF-IDF
      this.embedSearch = null;
      this.hybridSearch = null;
      return;
    }
    this.embedSearch = new EmbedSearch(this.app, this.settings.embeddingModel);
    this.embedSearch.excludeFolders = this.settings.embedExcludeFolders ?? [];
    this.embedSearch.contextProperties = this.settings.contextProperties ?? [];
    const modelShort = this.settings.embeddingModel.split("/").pop() ?? this.settings.embeddingModel;
    // Persistent notice updated during background indexing
    const notice = new Notice(`Memex [${modelShort}]: Embedding wird vorbereitet…`, 0);
    this.embedSearch.onModelStatus = (status) => {
      notice.setMessage(`Memex [${modelShort}]: ${status}`);
    };
    this.embedSearch.onProgress = (done, total, speed) => {
      const speedStr = speed > 0 ? ` — ${speed.toFixed(1)} N/s` : "";
      const remaining = speed > 0 && done < total ? (total - done) / speed : 0;
      const eta = remaining > 0
        ? ` • ~${remaining < 60 ? Math.ceil(remaining) + "s" : Math.ceil(remaining / 60) + "min"}`
        : "";
      notice.setMessage(`Memex [${modelShort}]: ${done}/${total}${speedStr}${eta}`);
    };
    // Wait for Obsidian Sync to finish before starting (avoids embedding stale/partial files)
    this.waitForSyncIdle(notice).then(() => this.embedSearch?.buildIndex())
      .then(() => {
        // Index built: enable hybrid ranking and announce completion
        if (this.embedSearch) this.hybridSearch = new HybridSearch(this.search, this.embedSearch);
        notice.setMessage(`✓ Memex [${modelShort}]: ${this.app.vault.getMarkdownFiles().length} Notizen eingebettet`);
        setTimeout(() => notice.hide(), 4000);
        this.notifyRelatedView();
      })
      .catch((e) => {
        notice.setMessage(`✗ Memex Embedding: ${(e as Error).message}`);
        setTimeout(() => notice.hide(), 6000);
        console.error(e);
      })
      .finally(() => {
        // Detach progress callbacks so later rebuilds don't write to a hidden notice
        if (this.embedSearch) {
          this.embedSearch.onProgress = undefined;
          this.embedSearch.onModelStatus = undefined;
        }
      });
  }

  /**
   * Rebuilds whichever index is active (embedding when enabled and
   * initialized, otherwise TF-IDF), streaming progress into the open
   * ChatView's status line if one exists.
   */
  async rebuildIndex(): Promise<void> {
    const leaves = this.app.workspace.getLeavesOfType(VIEW_TYPE_MEMEX_CHAT);
    const view = leaves[0]?.view as ChatView | undefined;
    if (this.settings.useEmbeddings && this.embedSearch) {
      // Rebuild semantic (embedding) index
      this.hybridSearch = null; // fall back to TF-IDF while rebuilding
      this.embedSearch.onModelStatus = (status) => {
        if (view) view.setStatus(status);
      };
      this.embedSearch.onProgress = (done, total, speed) => {
        if (view) {
          const speedStr = speed > 0 ? ` — ${speed.toFixed(1)} N/s` : "";
          const eta = speed > 0 && done < total
            ? ` • noch ~${Math.ceil((total - done) / speed)}s`
            : "";
          view.setStatus(`Embedding ${done}/${total}${speedStr}${eta}`);
        }
      };
      await this.embedSearch.buildIndex();
      this.hybridSearch = new HybridSearch(this.search, this.embedSearch);
      this.embedSearch.onProgress = undefined;
      this.embedSearch.onModelStatus = undefined;
    } else {
      // Rebuild TF-IDF index
      this.search.priorityProperties = this.settings.contextProperties;
      this.search.onProgress = (done, total) => {
        if (view && done % 200 === 0) {
          view.setStatus(`Indiziere… ${done}/${total}`);
        }
      };
      await this.search.buildIndex();
      this.search.onProgress = undefined;
    }
    if (view) {
      view.setStatus(`${this.app.vault.getMarkdownFiles().length} Notizen indiziert`);
      setTimeout(() => view.setStatus(""), 3000);
    }
  }

  /**
   * Waits until Obsidian Sync is idle.
   * Strategy: watch for vault changes; if activity stops for 15 s, sync is done.
   * If no activity within the first 5 s, sync isn't running — return immediately.
   * Falls back after 5 minutes regardless.
   */
  private async waitForSyncIdle(notice: Notice): Promise<void> {
    // Only wait if the Sync plugin is installed
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const syncPlugin = (this.app as any).internalPlugins?.plugins?.["sync"]?.instance;
    if (!syncPlugin) return;
    const PROBE_MS = 5_000; // time to detect if sync is active
    const QUIET_MS = 15_000; // idle period that signals sync completion
    const MAX_MS = 5 * 60_000;
    let lastChange = 0;
    let activitySeen = false;
    // Any vault create/modify/delete counts as "sync is still working"
    const tick = () => { lastChange = Date.now(); activitySeen = true; };
    this.app.vault.on("create", tick);
    this.app.vault.on("modify", tick);
    this.app.vault.on("delete", tick);
    try {
      notice.setMessage("Memex: Prüfe Sync-Status…");
      await new Promise((r) => setTimeout(r, PROBE_MS));
      if (!activitySeen) return; // no sync activity → proceed immediately
      notice.setMessage("Memex: Warte auf Obsidian Sync…");
      const deadline = Date.now() + MAX_MS;
      while (Date.now() < deadline) {
        await new Promise((r) => setTimeout(r, 2_000));
        if (Date.now() - lastChange >= QUIET_MS) return; // 15 s quiet → done
      }
      // Max wait reached — proceed anyway
    } finally {
      // Always unhook the temporary listeners (they are not registerEvent-managed)
      this.app.vault.off("create", tick);
      this.app.vault.off("modify", tick);
      this.app.vault.off("delete", tick);
    }
  }

  /** Persists the current settings (threads are saved alongside, unchanged). */
  async saveSettings(): Promise<void> {
    this.data.settings = this.settings;
    await this.saveData(this.data);
  }
}

1024
styles.css Normal file

File diff suppressed because it is too large Load Diff

17
tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"baseUrl": ".",
"inlineSourceMap": true,
"inlineSources": true,
"module": "ESNext",
"target": "ES2018",
"allowImportingTsExtensions": true,
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"importHelpers": true,
"isolatedModules": true,
"strictNullChecks": true,
"lib": ["ES2018", "DOM"]
},
"include": ["src/**/*.ts"]
}

4
versions.json Normal file
View File

@@ -0,0 +1,4 @@
{
"1.0.2": "1.4.0",
"1.0.3": "1.4.0"
}