Hopsakee commited on
Commit
2197ab7
·
verified ·
1 Parent(s): cb9905b

Upload folder using huggingface_hub

Browse files
Files changed (46) hide show
  1. .github/workflows/update_space.yml +28 -0
  2. .gitignore +170 -0
  3. .gradio/certificate.pem +31 -0
  4. .pre-commit-config.yaml +20 -0
  5. .streamlit/config.toml +2 -0
  6. LICENSE +21 -0
  7. README.md +98 -7
  8. __init__.py +0 -0
  9. data/Fab2Esp_transparent.png +0 -0
  10. data/Fab2Esp_transparent.png:Zone.Identifier +4 -0
  11. gradio_app_query_only.py +114 -0
  12. lib - Shortcut.lnk +0 -0
  13. logs/fabric_to_espanso.log.1 +0 -0
  14. main.py +120 -0
  15. parameters.py +64 -0
  16. pdm.lock +0 -0
  17. pyproject.toml +36 -0
  18. requirements.txt +548 -0
  19. setup.py +7 -0
  20. src/__init__.py +0 -0
  21. src/fabrics_processor/__init__.py +1 -0
  22. src/fabrics_processor/config.py +129 -0
  23. src/fabrics_processor/database.py +218 -0
  24. src/fabrics_processor/database_updater.py +147 -0
  25. src/fabrics_processor/exceptions.py +37 -0
  26. src/fabrics_processor/file_change_detector.py +132 -0
  27. src/fabrics_processor/file_processor.py +141 -0
  28. src/fabrics_processor/logger.py +60 -0
  29. src/fabrics_processor/markdown_parser.py +83 -0
  30. src/fabrics_processor/obsidian2fabric.py +90 -0
  31. src/fabrics_processor/output_files_generator.py +157 -0
  32. src/fabrics_processor/output_files_generator_temp.py +113 -0
  33. src/search_qdrant/__init__.py +0 -0
  34. src/search_qdrant/database_query.py +57 -0
  35. src/search_qdrant/logs/fabric_to_espanso.log.1 +135 -0
  36. src/search_qdrant/logs/fabric_to_espanso.log.2 +136 -0
  37. src/search_qdrant/logs/fabric_to_espanso.log.3 +136 -0
  38. src/search_qdrant/logs/fabric_to_espanso.log.4 +0 -0
  39. src/search_qdrant/run_query.sh +7 -0
  40. src/search_qdrant/run_streamlit.bup2 +23 -0
  41. src/search_qdrant/run_streamlit.sh +49 -0
  42. src/search_qdrant/run_streamlit_query_only_terminal_visible.sh +13 -0
  43. src/search_qdrant/run_streamlit_terminal_visible.sh +13 -0
  44. src/search_qdrant/streamlit_app.py +257 -0
  45. streamlit_app_query_only.py +143 -0
  46. tests/__init__.py +0 -0
.github/workflows/update_space.yml ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Run Python script
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+
8
+ jobs:
9
+ build:
10
+ runs-on: ubuntu-latest
11
+
12
+ steps:
13
+ - name: Checkout
14
+ uses: actions/checkout@v2
15
+
16
+ - name: Set up Python
17
+ uses: actions/setup-python@v2
18
+ with:
19
+ python-version: '3.9'
20
+
21
+ - name: Install Gradio
22
+ run: python -m pip install gradio
23
+
24
+ - name: Log in to Hugging Face
25
+ run: python -c 'import huggingface_hub; huggingface_hub.login(token="${{ secrets.hf_token }}")'
26
+
27
+ - name: Deploy to Spaces
28
+ run: gradio deploy
.gitignore ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Own additions
2
+ nohup.out
3
+ .streamlit/secrets.toml
4
+ secret*
5
+ *.ipynb
6
+
7
+ # Byte-compiled / optimized / DLL files
8
+ __pycache__/
9
+ *.py[cod]
10
+ *$py.class
11
+
12
+ # C extensions
13
+ *.so
14
+
15
+ # Distribution / packaging
16
+ .Python
17
+ build/
18
+ develop-eggs/
19
+ dist/
20
+ downloads/
21
+ eggs/
22
+ .eggs/
23
+ lib/
24
+ lib64/
25
+ parts/
26
+ sdist/
27
+ var/
28
+ wheels/
29
+ share/python-wheels/
30
+ *.egg-info/
31
+ .installed.cfg
32
+ *.egg
33
+ MANIFEST
34
+
35
+ # PyInstaller
36
+ # Usually these files are written by a python script from a template
37
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
38
+ *.manifest
39
+ *.spec
40
+
41
+ # Installer logs
42
+ pip-log.txt
43
+ pip-delete-this-directory.txt
44
+
45
+ # Unit test / coverage reports
46
+ htmlcov/
47
+ .tox/
48
+ .nox/
49
+ .coverage
50
+ .coverage.*
51
+ .cache
52
+ nosetests.xml
53
+ coverage.xml
54
+ *.cover
55
+ *.py,cover
56
+ .hypothesis/
57
+ .pytest_cache/
58
+ cover/
59
+
60
+ # Translations
61
+ *.mo
62
+ *.pot
63
+
64
+ # Django stuff:
65
+ *.log
66
+ local_settings.py
67
+ db.sqlite3
68
+ db.sqlite3-journal
69
+
70
+ # Flask stuff:
71
+ instance/
72
+ .webassets-cache
73
+
74
+ # Scrapy stuff:
75
+ .scrapy
76
+
77
+ # Sphinx documentation
78
+ docs/_build/
79
+
80
+ # PyBuilder
81
+ .pybuilder/
82
+ target/
83
+
84
+ # Jupyter Notebook
85
+ .ipynb_checkpoints
86
+
87
+ # IPython
88
+ profile_default/
89
+ ipython_config.py
90
+
91
+ # pyenv
92
+ # For a library or package, you might want to ignore these files since the code is
93
+ # intended to run in multiple environments; otherwise, check them in:
94
+ # .python-version
95
+
96
+ # pipenv
97
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
98
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
99
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
100
+ # install all needed dependencies.
101
+ #Pipfile.lock
102
+
103
+ # poetry
104
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
106
+ # commonly ignored for libraries.
107
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108
+ #poetry.lock
109
+
110
+ # pdm
111
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112
+ #pdm.lock
113
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114
+ # in version control.
115
+ # https://pdm-project.org/#use-with-ide
116
+ .pdm.toml
117
+ .pdm-python
118
+ .pdm-build/
119
+
120
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121
+ __pypackages__/
122
+
123
+ # Celery stuff
124
+ celerybeat-schedule
125
+ celerybeat.pid
126
+
127
+ # SageMath parsed files
128
+ *.sage.py
129
+
130
+ # Environments
131
+ .env
132
+ .venv
133
+ env/
134
+ venv/
135
+ ENV/
136
+ env.bak/
137
+ venv.bak/
138
+
139
+ # Spyder project settings
140
+ .spyderproject
141
+ .spyproject
142
+
143
+ # Rope project settings
144
+ .ropeproject
145
+
146
+ # mkdocs documentation
147
+ /site
148
+
149
+ # mypy
150
+ .mypy_cache/
151
+ .dmypy.json
152
+ dmypy.json
153
+
154
+ # Pyre type checker
155
+ .pyre/
156
+
157
+ # pytype static type analyzer
158
+ .pytype/
159
+
160
+ # Cython debug symbols
161
+ cython_debug/
162
+
163
+ # PyCharm
164
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
167
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168
+ #.idea/
169
+ .streamlit/secrets.toml
170
+ scratch.ipynb
.gradio/certificate.pem ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ -----BEGIN CERTIFICATE-----
2
+ MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
3
+ TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
4
+ cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
5
+ WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
6
+ ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
7
+ MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
8
+ h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
9
+ 0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
10
+ A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
11
+ T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
12
+ B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
13
+ B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
14
+ KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
15
+ OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
16
+ jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
17
+ qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
18
+ rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
19
+ HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
20
+ hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
21
+ ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
22
+ 3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
23
+ NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
24
+ ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
25
+ TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
26
+ jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
27
+ oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
28
+ 4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
29
+ mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
30
+ emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
31
+ -----END CERTIFICATE-----
.pre-commit-config.yaml ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # .pre-commit-config.yaml
2
+ repos:
3
+ - repo: https://github.com/pre-commit/pre-commit-hooks
4
+ rev: v4.4.0
5
+ hooks:
6
+ - id: trailing-whitespace
7
+ - id: end-of-file-fixer
8
+ - id: check-yaml
9
+ - id: check-added-large-files
10
+
11
+ - repo: https://github.com/psf/black
12
+ rev: 23.7.0
13
+ hooks:
14
+ - id: black
15
+
16
+ - repo: https://github.com/pre-commit/mirrors-mypy
17
+ rev: v1.4.1
18
+ hooks:
19
+ - id: mypy
20
+ additional_dependencies: [types-all]
.streamlit/config.toml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [server]
2
+ # Add any Streamlit-specific configurations here
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Jelle de Jong
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md CHANGED
@@ -1,12 +1,103 @@
1
  ---
2
- title: Fabric To Espanso
3
- emoji: 🔥
4
- colorFrom: purple
5
- colorTo: pink
6
  sdk: gradio
7
  sdk_version: 5.12.0
8
- app_file: app.py
9
- pinned: false
10
  ---
 
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: fabric_to_espanso
3
+ app_file: gradio_app_query_only.py
 
 
4
  sdk: gradio
5
  sdk_version: 5.12.0
 
 
6
  ---
7
+ # Fabric to Espanso Converter
8
 
9
+ A Python application that bridges Fabric prompts with Espanso by managing and converting prompts through a vector database.
10
+
11
+ ## Features
12
+
13
+ - Store and manage Fabric prompts in a Qdrant vector database
14
+ - Convert stored prompts into Espanso YAML format for system-wide usage
15
+ - Semantic search functionality to find relevant prompts based on their meaning
16
+ - Web interface for easy interaction with the prompt database
17
+
18
+ ## Prerequisites
19
+
20
+ - Python 3.11
21
+ - Qdrant vector database server (local or cloud)
22
+ - Obsidian with MeshAI plugin installed
23
+ - Windows (for PowerShell script) or Linux/WSL for direct execution
24
+
25
+ ## Installation
26
+
27
+ 1. Install Obsidian and the MeshAI plugin
28
+ 2. In Obsidian, create the following folder structure:
29
+ ```
30
+ Extra/
31
+ └── FabricPatterns/
32
+ ├── Official/ # For downloaded Fabric patterns
33
+ └── Own/ # For your custom additions
34
+ ```
35
+ 3. Clone this repository
36
+ 4. Install dependencies using PDM:
37
+ ```bash
38
+ pdm install
39
+ ```
40
+ 5. Configure your Qdrant server connection in the application settings
41
+
42
+ ## Usage
43
+
44
+ ### Linux/WSL
45
+
46
+ Run the Streamlit application directly:
47
+ ```bash
48
+ ./src/search_qdrant/run_streamlit.sh
49
+ ```
50
+
51
+ ### Windows
52
+
53
+ Create a PowerShell script with the following content to start the application:
54
+
55
+ ```powershell
56
+ # Start WSL process without showing window
57
+ $startInfo = New-Object System.Diagnostics.ProcessStartInfo
58
+ $startInfo.Filename = "wsl.exe"
59
+ # Use -c flag to let the command use the WSL2 Ubuntu folder system and not the Windows
60
+ $startInfo.Arguments = "bash -c ~/Tools/pythagora-core/workspace/fabrics_processor/src/search_qdrant/run_streamlit.sh"
61
+ $startInfo.UseShellExecute = $false
62
+ $startInfo.RedirectStandardOutput = $true
63
+ $startInfo.RedirectStandardError = $true
64
+ $startInfo.WindowStyle = [System.Diagnostics.ProcessWindowStyle]::Hidden
65
+ $startInfo.CreateNoWindow = $true
66
+
67
+ # Start the process
68
+ try {
69
+ $process = [System.Diagnostics.Process]::Start($startInfo)
70
+ Start-Sleep -Seconds 5
71
+
72
+ # Check if Streamlit is actually running
73
+ $streamlitRunning = Test-NetConnection -ComputerName localhost -Port 8501 -WarningAction SilentlyContinue
74
+
75
+ if ($streamlitRunning.TcpTestSucceeded) {
76
+ Start-Process "msedge.exe" "--app=http://localhost:8501"
77
+ } else {
78
+ Write-Error "Failed to start Streamlit application"
79
+ }
80
+ } catch {
81
+ Write-Error "Error starting Streamlit: $_"
82
+ }
83
+ ```
84
+
85
+ This script will:
86
+ 1. Start the Streamlit server if it's not already running
87
+ 2. Open the application in Microsoft Edge in app mode
88
+ 3. Automatically handle server startup and connection
89
+
90
+ ## Dependencies
91
+
92
+ - ipykernel >= 6.29.5
93
+ - markdown >= 3.7
94
+ - pyyaml >= 6.0.2
95
+ - qdrant-client >= 1.12.1
96
+ - fastembed >= 0.4.2
97
+ - streamlit >= 1.41.1
98
+ - pyperclip >= 1.9.0
99
+ - regex >= 2024.11.6
100
+
101
+ ## License
102
+
103
+ This project is licensed under the MIT License.
__init__.py ADDED
File without changes
data/Fab2Esp_transparent.png ADDED
data/Fab2Esp_transparent.png:Zone.Identifier ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ [ZoneTransfer]
2
+ ZoneId=3
3
+ ReferrerUrl=https://www7.lunapic.com/editor/?action=save
4
+ HostUrl=https://www7.lunapic.com/editor/saveas.php
gradio_app_query_only.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import pyperclip
3
+ from src.fabrics_processor.database import initialize_qdrant_database
4
+ from src.search_qdrant.database_query import query_qdrant_database
5
+ from src.fabrics_processor.logger import setup_logger
6
+ import logging
7
+ import atexit
8
+ from src.fabrics_processor.config import config
9
+ import time
10
+ import os
11
+ from dotenv import load_dotenv
12
+
13
+ # Load environment variables from .env file
14
+ load_dotenv()
15
+
16
+ # Configure logging
17
+ logger = setup_logger()
18
+
19
+ # Initialize the database client
20
+ client = None
21
def init_client():
    """Return the shared Qdrant client, creating it lazily on first use.

    The client lives in the module-level ``client`` variable so every
    request handler reuses a single connection.  On first creation an
    ``atexit`` hook is registered that closes the connection at interpreter
    shutdown — but only if the client still exposes a ``_transport``
    attribute, i.e. was not already torn down.
    """
    global client
    if client is not None:
        return client

    client = initialize_qdrant_database(api_key=os.environ.get("QDRANT_API_KEY"))
    # Register cleanup function
    atexit.register(lambda: client.close() if hasattr(client, '_transport') else None)
    return client
28
+
29
def search_prompts(query):
    """Search for prompts based on the query.

    Runs a semantic search against the Qdrant collection, stores the raw
    hits in the module-level ``current_results`` (so that
    ``show_selected_prompt`` can look up the full text later), and returns
    a pair of (radio component listing matching filenames, cleared preview).
    On any failure the error is logged and an empty radio is returned so
    the UI stays usable.
    """
    global current_results
    try:
        hits = query_qdrant_database(
            query=query,
            client=init_client(),
            num_results=5,
            collection_name=config.embedding.collection_name
        )

        if not hits:
            return gr.Radio(choices=[]), None

        # Radio entries show only the filename; the full hit objects are
        # kept module-wide for the selection handler.
        choices = [hit.metadata['filename'] for hit in hits]
        current_results = hits
        return gr.Radio(choices=choices), None

    except Exception as e:
        logger.error(f"Error during search: {str(e)}")
        return gr.Radio(choices=[]), None
53
+
54
def show_selected_prompt(selected_filename):
    """Display the content of the selected prompt.

    Looks *selected_filename* up in the module-level ``current_results``
    populated by the most recent search.  Returns the prompt's full
    content, or an empty string when nothing is selected, no search has
    run yet, or the filename is not among the current hits.
    """
    if not selected_filename or not current_results:
        return ""

    # Linear scan over the (at most 5) current search hits.
    for result in current_results:
        if result.metadata['filename'] == selected_filename:
            return result.metadata['content']
    return ""
68
+
69
# Store current results globally
# NOTE(review): a `global` statement at module scope is a no-op; the
# assignment below already creates the module-level name.
global current_results
current_results = []

# UI layout: a query textbox + search button, a radio list of matching
# prompt filenames, and a markdown pane that previews the chosen prompt.
with gr.Blocks() as demo:
    gr.Markdown("# Prompt finding and comparing")

    with gr.Column():
        query_input = gr.Textbox(
            label="What are you trying to accomplish? I will then search for good prompts to give you a good start.",
            lines=3,
            autofocus=True,  # This will focus the textbox when the page loads
            interactive=True  # This enables keyboard events
        )
        search_button = gr.Button("Search")

        # Radio buttons for selecting prompts (choices are filled in by
        # search_prompts after a query runs)
        results_radio = gr.Radio(
            choices=[],
            label="Select a prompt",
            interactive=True
        )

        # Display area for selected prompt using Markdown
        selected_prompt_display = gr.Markdown(label="Selected Prompt", show_copy_button=True)

    # Set up event handlers: pressing Enter in the textbox and clicking the
    # button both trigger the same search.
    query_input.submit(
        fn=search_prompts,
        inputs=[query_input],
        outputs=[results_radio, selected_prompt_display]
    )
    search_button.click(
        fn=search_prompts,
        inputs=[query_input],
        outputs=[results_radio, selected_prompt_display]
    )

    # Choosing a radio entry replaces the preview with that prompt's content.
    results_radio.change(
        fn=show_selected_prompt,
        inputs=[results_radio],
        outputs=[selected_prompt_display]
    )

if __name__ == "__main__":
    # share=True also publishes a temporary public Gradio link in addition
    # to the local server.
    demo.launch(share=True)
lib - Shortcut.lnk ADDED
Binary file (2.72 kB). View file
 
logs/fabric_to_espanso.log.1 ADDED
The diff for this file is too large to render. See raw diff
 
main.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Main entry point for the Fabric to Espanso conversion process."""
2
+ from typing import Optional
3
+ import sys
4
+ import signal
5
+ import logging
6
+ from contextlib import contextmanager
7
+
8
+ from src.fabrics_processor.database import initialize_qdrant_database
9
+ from src.fabrics_processor.file_change_detector import detect_file_changes
10
+ from src.fabrics_processor.database_updater import update_qdrant_database
11
+ from src.fabrics_processor.yaml_file_generator import generate_yaml_file
12
+ from src.fabrics_processor.logger import setup_logger
13
+ from src.fabrics_processor.config import config
14
+ from src.fabrics_processor.exceptions import (
15
+ DatabaseConnectionError,
16
+ DatabaseInitializationError
17
+ )
18
+
19
+ # Setup logger
20
+ logger = setup_logger()
21
+
22
class GracefulExit(SystemExit):
    """Raised by the signal handler to request a clean shutdown.

    Subclassing ``SystemExit`` means an uncaught instance still terminates
    the process quietly, while ``main`` can catch it explicitly to log the
    shutdown and return a success exit code.
    """
25
+
26
def signal_handler(signum, frame):
    """Translate a termination signal into a GracefulExit exception.

    Args:
        signum: Number of the delivered signal (SIGINT/SIGTERM).
        frame: Stack frame active when the signal arrived (unused).

    Raises:
        GracefulExit: always, so the main loop can unwind and clean up.
    """
    message = f"Received signal {signum}. Initiating graceful shutdown..."
    logger.info(message)
    raise GracefulExit()
30
+
31
@contextmanager
def managed_qdrant_client():
    """Yield an initialized Qdrant client and guarantee it is closed.

    The close happens in the ``finally`` clause, so cleanup runs even if
    initialization partially succeeded or the ``with`` body raised.
    """
    qdrant_client = None
    try:
        qdrant_client = initialize_qdrant_database()
        yield qdrant_client
    finally:
        # Only close when initialization actually produced a client.
        if qdrant_client:
            logger.info("Closing Qdrant client connection...")
            qdrant_client.close()
            logger.info("Qdrant client connection closed")
43
+
44
def process_changes(client) -> bool:
    """Process file changes and update database and YAML files.

    Args:
        client: Initialized Qdrant client

    Returns:
        bool: True if processing was successful, False otherwise
    """
    try:
        # Detect file changes
        changes = detect_file_changes(client, config.fabric_patterns_folder)
        new_files, modified_files, deleted_files = changes

        # Log the results
        if new_files:
            logger.info(f"New files: {[file['filename'] for file in new_files]}")
        if modified_files:
            logger.info(f"Modified files: {[file['filename'] for file in modified_files]}")
        if deleted_files:
            logger.info(f"Deleted files: {deleted_files}")

        # Update database if there are changes
        if any(changes):
            logger.info("Changes detected. Updating database...")
            update_qdrant_database(client, new_files, modified_files, deleted_files)

        # Always generate output files to ensure consistency, even when no
        # change was detected in this run.
        generate_yaml_file(client, config.yaml_output_folder)

        return True

    except Exception as e:
        logger.error(f"Error processing changes: {str(e)}", exc_info=True)
        return False
78
+
79
def main() -> Optional[int]:
    """Main application entry point.

    Returns:
        Optional[int]: Exit code, None if successful, 1 if error
    """
    # Install handlers so SIGINT/SIGTERM raise GracefulExit instead of
    # killing the process in the middle of a database update.
    for shutdown_signal in (signal.SIGINT, signal.SIGTERM):
        signal.signal(shutdown_signal, signal_handler)

    try:
        logger.info("Fabric to Espanso conversion process started")

        # Log configuration
        logger.info(f"Using configuration:")
        logger.info(f"  Database URL: {config.database.url}")
        logger.info(f"  Fabric patterns folder: {config.fabric_patterns_folder}")
        logger.info(f"  YAML output folder: {config.yaml_output_folder}")
        logger.info(f"  Obsidian textgenerator markdown output folder: {config.markdown_output_folder}")
        logger.info(f"  Obsidian personal prompts input folder: {config.obsidian_input_folder}")

        # Process changes with managed client
        with managed_qdrant_client() as client:
            if not process_changes(client):
                logger.error("Fabric to Espanso conversion completed with errors")
                return 1
            logger.info("Fabric to Espanso conversion completed successfully")
            return None

    except GracefulExit:
        logger.info("Gracefully shutting down...")
        return None
    except (DatabaseConnectionError, DatabaseInitializationError) as e:
        logger.error(f"Database error: {str(e)}")
        return 1
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}", exc_info=True)
        return 1
118
+
119
+ if __name__ == "__main__":
120
+ sys.exit(main() or 0)
parameters.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Checks on input data are done in config.py
These parameters must be loaded in the script using config.py"""
import os

#
# Initialize some automated variables.
# Only needed for updating the database and writing the YAML espanso file
# and the markdown Obsidian files, so not necessary for running the
# streamlit app with query only.
# These automated variables don't work in the cloud because the cloud has
# no local filesystem, therefore we first check whether we are running in
# a local WSL environment.
#
# Project root directory
is_wsl = os.environ.get('WSL_DISTRO_NAME') is not None

if is_wsl:
    PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))

    # Get Windows user profile path (via cmd.exe, reachable from WSL)
    import subprocess
    windows_user = subprocess.check_output(['cmd.exe', '/c', 'echo %USERNAME%'], text=True).strip()
    #
    # User parameters
    #
    # Location of input and output files
    # TODO: make use of ~ possible in setting of path
    FABRIC_PATTERNS_FOLDER = "/home/jelle/.config/fabric/patterns"
    OBSIDIAN_OUTPUT_FOLDER = "/mnt/c/Obsidian/BrainCave/Extra/textgenerator/templates/fabric"
    OBSIDIAN_INPUT_FOLDER = "/mnt/c/Obsidian/BrainCave/d5 WDODelta/50-59 Programmeren en development/56 Generative AI en LLM/56.15 PromptsLibrary"
    YAML_OUTPUT_FOLDER = f"/mnt/c/Users/{windows_user}/AppData/Roaming/espanso/match"
else:
    # Placeholder values: in the cloud there is no local filesystem to sync.
    windows_user = "cloud_dummy"
    FABRIC_PATTERNS_FOLDER = "cloud_dummy"
    OBSIDIAN_OUTPUT_FOLDER = "cloud_dummy"
    OBSIDIAN_INPUT_FOLDER = "cloud_dummy"
    YAML_OUTPUT_FOLDER = "cloud_dummy"

# Headings to extract from markdown files
BASE_WORDS = ['Identity', 'Purpose', 'Task', 'Goal']


# Qdrant database parameters
# TODO: this parameter is currently not used by the script (the URL is
# hard-coded there); make the script read it from here instead.
# Local:
# QDRANT_URL = "http://localhost:6333"
# COLLECTION_NAME = "fabric_patterns"
# Cloud:
QDRANT_URL = "https://91ed3a93-6135-4951-a624-1c8c2878240d.europe-west3-0.gcp.cloud.qdrant.io:6333"
COLLECTION_NAME = "fabric_patterns"

# Required fields for database points
DEFAULT_TRIGGER = ";;fab"
REQUIRED_FIELDS = ['filename', 'content', 'purpose', 'filesize', 'trigger']
REQUIRED_FIELDS_DEFAULTS = {
    # Reference DEFAULT_TRIGGER so the default is defined in exactly one
    # place (previously the literal ";;fab" was duplicated here).
    'trigger': DEFAULT_TRIGGER,
    'filesize': 0,
    'purpose': None  # Will be set to content if missing
}

# Embedding model parameters for Qdrant
USE_FASTEMBED = True
EMBED_MODEL = "fast-bge-small-en"
pdm.lock ADDED
The diff for this file is too large to render. See raw diff
 
pyproject.toml ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "fabric-to-espanso"
3
+ version = "0.1.0"
4
+ description = "Default template for PDM package"
5
+ authors = [
6
+ {name = "Hopsakee", email = "[email protected]"},
7
+ ]
8
+ packages = [
9
+ {include = "src"},
10
+ ]
11
+ dependencies = [
12
+ "ipykernel>=6.29.5",
13
+ "markdown>=3.7",
14
+ "pyyaml>=6.0.2",
15
+ "qdrant-client>=1.12.1",
16
+ "fastembed>=0.4.2",
17
+ "streamlit>=1.41.1",
18
+ "pyperclip>=1.9.0",
19
+ "regex>=2024.11.6",
20
+ "fastcore>=1.7.28",
21
+ "setuptools>=75.8.0",
22
+ "gradio>=5.12.0",
23
+ "python-dotenv>=1.0.1",
24
+ ]
25
+ requires-python = "==3.11.*"
26
+ readme = "README.md"
27
+ license = {text = "MIT"}
28
+
29
+
30
+ [tool.pdm]
31
+ package-dir = "src"
32
+ distribution = false
33
+
34
+ [build-system]
35
+ requires = ["pdm-backend"]
36
+ build-backend = "pdm.backend"
requirements.txt ADDED
@@ -0,0 +1,548 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is @generated by PDM.
2
+ # Please do not edit it manually.
3
+
4
+ altair==5.5.0 \
5
+ --hash=sha256:91a310b926508d560fe0148d02a194f38b824122641ef528113d029fcd129f8c \
6
+ --hash=sha256:d960ebe6178c56de3855a68c47b516be38640b73fb3b5111c2a9ca90546dd73d
7
+ annotated-types==0.7.0 \
8
+ --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
9
+ --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
10
+ anyio==4.7.0 \
11
+ --hash=sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48 \
12
+ --hash=sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352
13
+ appnope==0.1.4; platform_system == "Darwin" \
14
+ --hash=sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee \
15
+ --hash=sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c
16
+ asttokens==2.4.1 \
17
+ --hash=sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24 \
18
+ --hash=sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0
19
+ attrs==24.3.0 \
20
+ --hash=sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff \
21
+ --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308
22
+ blinker==1.9.0 \
23
+ --hash=sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf \
24
+ --hash=sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc
25
+ cachetools==5.5.0 \
26
+ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
27
+ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
28
+ certifi==2024.8.30 \
29
+ --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \
30
+ --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9
31
+ cffi==1.17.1; implementation_name == "pypy" \
32
+ --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \
33
+ --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \
34
+ --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \
35
+ --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \
36
+ --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \
37
+ --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \
38
+ --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \
39
+ --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \
40
+ --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \
41
+ --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \
42
+ --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \
43
+ --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \
44
+ --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b
45
+ charset-normalizer==3.4.0 \
46
+ --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
47
+ --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
48
+ --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
49
+ --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
50
+ --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
51
+ --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
52
+ --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
53
+ --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
54
+ --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
55
+ --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
56
+ --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
57
+ --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
58
+ --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
59
+ --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
60
+ --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
61
+ --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
62
+ --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079
63
+ click==8.1.7 \
64
+ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
65
+ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
66
+ colorama==0.4.6; platform_system == "Windows" or sys_platform == "win32" \
67
+ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
68
+ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
69
+ coloredlogs==15.0.1; python_version >= "3.10" and python_version < "3.13" \
70
+ --hash=sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934 \
71
+ --hash=sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0
72
+ comm==0.2.2 \
73
+ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \
74
+ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3
75
+ debugpy==1.8.9 \
76
+ --hash=sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e \
77
+ --hash=sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70 \
78
+ --hash=sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66 \
79
+ --hash=sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040 \
80
+ --hash=sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899 \
81
+ --hash=sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d
82
+ decorator==5.1.1 \
83
+ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \
84
+ --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186
85
+ executing==2.1.0 \
86
+ --hash=sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf \
87
+ --hash=sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab
88
+ fastcore==1.7.28 \
89
+ --hash=sha256:606e4507eb4b8892e4c83ddf5462fbcf32f4bde4fa6caf56ca67ee5e2dbe2b1e \
90
+ --hash=sha256:ffa1ab1b34518795a4342b85ebb9cd2b30588210c21df028a11e420678a59e20
91
+ fastembed==0.5.0 \
92
+ --hash=sha256:420e42ced462e44878065ce13f812485f9788f9f4f1cfd73e075add728d17a70 \
93
+ --hash=sha256:a1a242ca9ffec866cd0336a435e3169ffb7dd60d67b3f61dbb6683f74e3856cf
94
+ filelock==3.16.1 \
95
+ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
96
+ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
97
+ flatbuffers==24.3.25; python_version >= "3.10" and python_version < "3.13" \
98
+ --hash=sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812 \
99
+ --hash=sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4
100
+ fsspec==2024.10.0 \
101
+ --hash=sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871 \
102
+ --hash=sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493
103
+ gitdb==4.0.11 \
104
+ --hash=sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 \
105
+ --hash=sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b
106
+ gitpython==3.1.43 \
107
+ --hash=sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c \
108
+ --hash=sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff
109
+ grpcio==1.68.1 \
110
+ --hash=sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600 \
111
+ --hash=sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515 \
112
+ --hash=sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054 \
113
+ --hash=sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73 \
114
+ --hash=sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c \
115
+ --hash=sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2 \
116
+ --hash=sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd \
117
+ --hash=sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9 \
118
+ --hash=sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe \
119
+ --hash=sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0
120
+ grpcio-tools==1.68.1 \
121
+ --hash=sha256:02f04de42834129eb54bb12469160ab631a0395d6a2b77975381c02b994086c3 \
122
+ --hash=sha256:12239cf5ca6b7b4937103953cf35c49683d935e32e98596fe52dd35168aa86e6 \
123
+ --hash=sha256:1f0ac6ac5e1e33b998511981b3ef36489501833413354f3597b97a3452d7d7ba \
124
+ --hash=sha256:2114528723d9f12d3e24af3d433ec6f140deea1dd64d3bb1b4ebced217f1867c \
125
+ --hash=sha256:21815d54a83effbd2600d16382a7897298cfeffe578557fc9a47b642cc8ddafe \
126
+ --hash=sha256:2413a17ad16c9c821b36e4a67fc64c37b9e4636ab1c3a07778018801378739ba \
127
+ --hash=sha256:28e0bca3a262af86557f30e30ddf2fadc2324ee05cd7352716924cc7f83541f1 \
128
+ --hash=sha256:8e48d8884fcf6b182c73d0560a183404458e30a0f479918b88ca8fbd48b8b05f \
129
+ --hash=sha256:92b6aab37095879ef9ee428dd171740ff794f4c7a66bc1cc7280cd0051f8cd96 \
130
+ --hash=sha256:e4e8059469847441855322da16fa2c0f9787b996c237a98778210e31188a8652
131
+ h11==0.14.0 \
132
+ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \
133
+ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761
134
+ h2==4.1.0 \
135
+ --hash=sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d \
136
+ --hash=sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb
137
+ hpack==4.0.0 \
138
+ --hash=sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c \
139
+ --hash=sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095
140
+ httpcore==1.0.7 \
141
+ --hash=sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c \
142
+ --hash=sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd
143
+ httpx[http2]==0.28.1 \
144
+ --hash=sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc \
145
+ --hash=sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad
146
+ huggingface-hub==0.26.5 \
147
+ --hash=sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b \
148
+ --hash=sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924
149
+ humanfriendly==10.0; python_version >= "3.10" and python_version < "3.13" \
150
+ --hash=sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477 \
151
+ --hash=sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc
152
+ hyperframe==6.0.1 \
153
+ --hash=sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15 \
154
+ --hash=sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914
155
+ idna==3.10 \
156
+ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
157
+ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
158
+ ipykernel==6.29.5 \
159
+ --hash=sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5 \
160
+ --hash=sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215
161
+ ipython==8.29.0 \
162
+ --hash=sha256:0188a1bd83267192123ccea7f4a8ed0a78910535dbaa3f37671dca76ebd429c8 \
163
+ --hash=sha256:40b60e15b22591450eef73e40a027cf77bd652e757523eebc5bd7c7c498290eb
164
+ jedi==0.19.2 \
165
+ --hash=sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0 \
166
+ --hash=sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9
167
+ jinja2==3.1.4 \
168
+ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
169
+ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
170
+ jsonschema==4.23.0 \
171
+ --hash=sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4 \
172
+ --hash=sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566
173
+ jsonschema-specifications==2024.10.1 \
174
+ --hash=sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272 \
175
+ --hash=sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf
176
+ jupyter-client==8.6.3 \
177
+ --hash=sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419 \
178
+ --hash=sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f
179
+ jupyter-core==5.7.2 \
180
+ --hash=sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409 \
181
+ --hash=sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9
182
+ loguru==0.7.3 \
183
+ --hash=sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6 \
184
+ --hash=sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c
185
+ markdown==3.7 \
186
+ --hash=sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2 \
187
+ --hash=sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803
188
+ markdown-it-py==3.0.0 \
189
+ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
190
+ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
191
+ markupsafe==3.0.2 \
192
+ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \
193
+ --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \
194
+ --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \
195
+ --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \
196
+ --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \
197
+ --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \
198
+ --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \
199
+ --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \
200
+ --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \
201
+ --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \
202
+ --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0
203
+ matplotlib-inline==0.1.7 \
204
+ --hash=sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90 \
205
+ --hash=sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca
206
+ mdurl==0.1.2 \
207
+ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
208
+ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
209
+ mmh3==4.1.0 \
210
+ --hash=sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106 \
211
+ --hash=sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec \
212
+ --hash=sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f \
213
+ --hash=sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6 \
214
+ --hash=sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb \
215
+ --hash=sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13 \
216
+ --hash=sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14 \
217
+ --hash=sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e \
218
+ --hash=sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db \
219
+ --hash=sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea \
220
+ --hash=sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896 \
221
+ --hash=sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6 \
222
+ --hash=sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a \
223
+ --hash=sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9 \
224
+ --hash=sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0 \
225
+ --hash=sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560 \
226
+ --hash=sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f
227
+ mpmath==1.3.0; python_version >= "3.10" and python_version < "3.13" \
228
+ --hash=sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f \
229
+ --hash=sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c
230
+ narwhals==1.18.4 \
231
+ --hash=sha256:b1da4e2e4ab185824781760319ac1ec8ee2944a929795064c3a64ffff16b00c4 \
232
+ --hash=sha256:c6bb6b6fba59caeab28a7d6ec1e79ab0040c75baef2e4152199ad1a9c266ef96
233
+ nest-asyncio==1.6.0 \
234
+ --hash=sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe \
235
+ --hash=sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c
236
+ numpy==2.2.0 \
237
+ --hash=sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608 \
238
+ --hash=sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90 \
239
+ --hash=sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0 \
240
+ --hash=sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73 \
241
+ --hash=sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da \
242
+ --hash=sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74 \
243
+ --hash=sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410 \
244
+ --hash=sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d \
245
+ --hash=sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6 \
246
+ --hash=sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b \
247
+ --hash=sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e
248
+ onnx==1.17.0 \
249
+ --hash=sha256:081ec43a8b950171767d99075b6b92553901fa429d4bc5eb3ad66b36ef5dbe3a \
250
+ --hash=sha256:48ca1a91ff73c1d5e3ea2eef20ae5d0e709bb8a2355ed798ffc2169753013fd3 \
251
+ --hash=sha256:4a183c6178be001bf398260e5ac2c927dc43e7746e8638d6c05c20e321f8c949 \
252
+ --hash=sha256:95c03e38671785036bb704c30cd2e150825f6ab4763df3a4f1d249da48525957 \
253
+ --hash=sha256:d6fc3a03fc0129b8b6ac03f03bc894431ffd77c7d79ec023d0afd667b4d35869 \
254
+ --hash=sha256:f01a4b63d4e1d8ec3e2f069e7b798b2955810aa434f7361f01bc8ca08d69cce4
255
+ onnxruntime==1.19.2; python_version >= "3.10" and python_version < "3.13" \
256
+ --hash=sha256:1c3e5d415b78337fa0b1b75291e9ea9fb2a4c1f148eb5811e7212fed02cfffa8 \
257
+ --hash=sha256:50cbb8dc69d6befad4746a69760e5b00cc3ff0a59c6c3fb27f8afa20e2cab7e7 \
258
+ --hash=sha256:a36511dc07c5c964b916697e42e366fa43c48cdb3d3503578d78cef30417cb84 \
259
+ --hash=sha256:c1dfe4f660a71b31caa81fc298a25f9612815215a47b286236e61d540350d7b6 \
260
+ --hash=sha256:d863e8acdc7232d705d49e41087e10b274c42f09e259016a46f32c34e06dc4fd
261
+ packaging==24.2 \
262
+ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
263
+ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
264
+ pandas==2.2.3 \
265
+ --hash=sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32 \
266
+ --hash=sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5 \
267
+ --hash=sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667 \
268
+ --hash=sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3 \
269
+ --hash=sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039 \
270
+ --hash=sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd \
271
+ --hash=sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc \
272
+ --hash=sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698
273
+ parso==0.8.4 \
274
+ --hash=sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18 \
275
+ --hash=sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d
276
+ pexpect==4.9.0; sys_platform != "win32" and sys_platform != "emscripten" \
277
+ --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \
278
+ --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f
279
+ pillow==10.4.0 \
280
+ --hash=sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c \
281
+ --hash=sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06 \
282
+ --hash=sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696 \
283
+ --hash=sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d \
284
+ --hash=sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3 \
285
+ --hash=sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6 \
286
+ --hash=sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496 \
287
+ --hash=sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319 \
288
+ --hash=sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe \
289
+ --hash=sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91 \
290
+ --hash=sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be \
291
+ --hash=sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22
292
+ platformdirs==4.3.6 \
293
+ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
294
+ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
295
+ portalocker==2.10.1 \
296
+ --hash=sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf \
297
+ --hash=sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f
298
+ prompt-toolkit==3.0.48 \
299
+ --hash=sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90 \
300
+ --hash=sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e
301
+ protobuf==5.29.1 \
302
+ --hash=sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34 \
303
+ --hash=sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110 \
304
+ --hash=sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0 \
305
+ --hash=sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb \
306
+ --hash=sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d \
307
+ --hash=sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155 \
308
+ --hash=sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18
309
+ psutil==6.1.0 \
310
+ --hash=sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e \
311
+ --hash=sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e \
312
+ --hash=sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a \
313
+ --hash=sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b \
314
+ --hash=sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688 \
315
+ --hash=sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38 \
316
+ --hash=sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be \
317
+ --hash=sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a
318
+ ptyprocess==0.7.0; sys_platform != "win32" and sys_platform != "emscripten" \
319
+ --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \
320
+ --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220
321
+ pure-eval==0.2.3 \
322
+ --hash=sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0 \
323
+ --hash=sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42
324
+ py-rust-stemmers==0.1.3 \
325
+ --hash=sha256:02b347ab8fe686a88aef0432060471d501b37a6b9a868e7c50bffcd382269cf2 \
326
+ --hash=sha256:2d8b8e6b6d5839a168dae510a00ff4662c7d0a22d12f24fe81caa0ac59265711 \
327
+ --hash=sha256:47211ac6252eb484f5067d30b1812667936deffcef89b4b0acd2efe881a99aed \
328
+ --hash=sha256:658784c0072f7aae67c726be9acac40dd27b29416356c63a3a760a9499a93513 \
329
+ --hash=sha256:72a7b810d8d376c03f0ccebe146f04cbf4c6c97bd74e489b0ddf1342eb40970c \
330
+ --hash=sha256:9fbbb37e0df579859b42b3f850aa08fe829d190d32c6338349eccb0e762b74c6 \
331
+ --hash=sha256:ad796d47874181a25addb505a04245e34620bd7a0c5055671f52d9ce993253e2 \
332
+ --hash=sha256:d4a65b429eb1282934a1cc3c1b2698ae32a6dc00d6be00dd747e688c642eb110 \
333
+ --hash=sha256:d6f9790fe1e9962787817b1894486df7e0b5fc59e4adad423e189530530fae11 \
334
+ --hash=sha256:e6afcd19da56d4182eecb43bdb6c5b9686370063f2538df877fc23f1d16f909e \
335
+ --hash=sha256:fd5d7388f807f584b4c55bfbe608ef40cff0024c1dc54de95d28265395065d02
336
+ pyarrow==18.1.0 \
337
+ --hash=sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe \
338
+ --hash=sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c \
339
+ --hash=sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73 \
340
+ --hash=sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a \
341
+ --hash=sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6 \
342
+ --hash=sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854 \
343
+ --hash=sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0 \
344
+ --hash=sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21
345
+ pycparser==2.22; implementation_name == "pypy" \
346
+ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
347
+ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
348
+ pydantic==2.10.3 \
349
+ --hash=sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d \
350
+ --hash=sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9
351
+ pydantic-core==2.27.1 \
352
+ --hash=sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919 \
353
+ --hash=sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9 \
354
+ --hash=sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c \
355
+ --hash=sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5 \
356
+ --hash=sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e \
357
+ --hash=sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235 \
358
+ --hash=sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52 \
359
+ --hash=sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89 \
360
+ --hash=sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de \
361
+ --hash=sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c \
362
+ --hash=sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330 \
363
+ --hash=sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8 \
364
+ --hash=sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025 \
365
+ --hash=sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4 \
366
+ --hash=sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc
367
+ pydeck==0.9.1 \
368
+ --hash=sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038 \
369
+ --hash=sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605
370
+ pygments==2.18.0 \
371
+ --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
372
+ --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
373
+ pyperclip==1.9.0 \
374
+ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310
375
+ pyreadline3==3.5.4; sys_platform == "win32" and python_version >= "3.10" and python_version < "3.13" \
376
+ --hash=sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7 \
377
+ --hash=sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6
378
+ python-dateutil==2.9.0.post0 \
379
+ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
380
+ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
381
+ pytz==2024.2 \
382
+ --hash=sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a \
383
+ --hash=sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725
384
+ pywin32==308; sys_platform == "win32" and platform_python_implementation != "PyPy" or platform_system == "Windows" \
385
+ --hash=sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6 \
386
+ --hash=sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b \
387
+ --hash=sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a
388
+ pyyaml==6.0.2 \
389
+ --hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \
390
+ --hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \
391
+ --hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \
392
+ --hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \
393
+ --hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \
394
+ --hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \
395
+ --hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \
396
+ --hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \
397
+ --hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \
398
+ --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
399
+ pyzmq==26.2.0 \
400
+ --hash=sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f \
401
+ --hash=sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37 \
402
+ --hash=sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3 \
403
+ --hash=sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4 \
404
+ --hash=sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5 \
405
+ --hash=sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6 \
406
+ --hash=sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e \
407
+ --hash=sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317 \
408
+ --hash=sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef \
409
+ --hash=sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218 \
410
+ --hash=sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf \
411
+ --hash=sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003 \
412
+ --hash=sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4
413
+ qdrant-client==1.12.2 \
414
+ --hash=sha256:2777e09b3e89bb22bb490384d8b1fa8140f3915287884f18984f7031a346aba5 \
415
+ --hash=sha256:a0ae500a46a679ff3521ba3f1f1cf3d72b57090a768cec65fc317066bcbac1e6
416
+ referencing==0.35.1 \
417
+ --hash=sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c \
418
+ --hash=sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de
419
+ regex==2024.11.6 \
420
+ --hash=sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60 \
421
+ --hash=sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d \
422
+ --hash=sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114 \
423
+ --hash=sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3 \
424
+ --hash=sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d \
425
+ --hash=sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7 \
426
+ --hash=sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f \
427
+ --hash=sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34 \
428
+ --hash=sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638 \
429
+ --hash=sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519 \
430
+ --hash=sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20 \
431
+ --hash=sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89 \
432
+ --hash=sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45 \
433
+ --hash=sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55 \
434
+ --hash=sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9 \
435
+ --hash=sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0
436
+ requests==2.32.3 \
437
+ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
438
+ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
439
+ rich==13.9.4 \
440
+ --hash=sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098 \
441
+ --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90
442
+ rpds-py==0.22.3 \
443
+ --hash=sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2 \
444
+ --hash=sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a \
445
+ --hash=sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d \
446
+ --hash=sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1 \
447
+ --hash=sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a \
448
+ --hash=sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0 \
449
+ --hash=sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c \
450
+ --hash=sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64 \
451
+ --hash=sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb \
452
+ --hash=sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f \
453
+ --hash=sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d \
454
+ --hash=sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74 \
455
+ --hash=sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648 \
456
+ --hash=sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5
457
+ setuptools==75.8.0 \
458
+ --hash=sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6 \
459
+ --hash=sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3
460
+ six==1.16.0 \
461
+ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
462
+ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
463
+ smmap==5.0.1 \
464
+ --hash=sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62 \
465
+ --hash=sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da
466
+ sniffio==1.3.1 \
467
+ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
468
+ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
469
+ stack-data==0.6.3 \
470
+ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \
471
+ --hash=sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695
472
+ streamlit==1.41.1 \
473
+ --hash=sha256:0def00822480071d642e6df36cd63c089f991da3a69fd9eb4ab8f65ce27de4e0 \
474
+ --hash=sha256:6626d32b098ba1458b71eebdd634c62af2dd876380e59c4b6a1e828a39d62d69
475
+ sympy==1.13.3; python_version >= "3.10" and python_version < "3.13" \
476
+ --hash=sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73 \
477
+ --hash=sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9
478
+ tenacity==9.0.0 \
479
+ --hash=sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b \
480
+ --hash=sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539
481
+ tokenizers==0.21.0 \
482
+ --hash=sha256:089d56db6782a73a27fd8abf3ba21779f5b85d4a9f35e3b493c7bbcbbf0d539b \
483
+ --hash=sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2 \
484
+ --hash=sha256:400832c0904f77ce87c40f1a8a27493071282f785724ae62144324f171377273 \
485
+ --hash=sha256:4145505a973116f91bc3ac45988a92e618a6f83eb458f49ea0790df94ee243ff \
486
+ --hash=sha256:6b177fb54c4702ef611de0c069d9169f0004233890e0c4c5bd5508ae05abf193 \
487
+ --hash=sha256:6b43779a269f4629bebb114e19c3fca0223296ae9fea8bb9a7a6c6fb0657ff8e \
488
+ --hash=sha256:87841da5a25a3a5f70c102de371db120f41873b854ba65e52bccd57df5a3780c \
489
+ --hash=sha256:9aeb255802be90acfd363626753fda0064a8df06031012fe7d52fd9a905eb00e \
490
+ --hash=sha256:c87ca3dc48b9b1222d984b6b7490355a6fdb411a2d810f6f05977258400ddb74 \
491
+ --hash=sha256:d8b09dbeb7a8d73ee204a70f94fc06ea0f17dcf0844f16102b9f414f0b7463ba \
492
+ --hash=sha256:e84ca973b3a96894d1707e189c14a774b701596d579ffc7e69debfc036a61a04 \
493
+ --hash=sha256:eb1702c2f27d25d9dd5b389cc1f2f51813e99f8ca30d9e25348db6585a97e24a \
494
+ --hash=sha256:eb7202d231b273c34ec67767378cd04c767e967fda12d4a9e36208a34e2f137e \
495
+ --hash=sha256:ee0894bf311b75b0c03079f33859ae4b2334d675d4e93f5a4132e1eae2834fe4 \
496
+ --hash=sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e
497
+ toml==0.10.2 \
498
+ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
499
+ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
500
+ tornado==6.4.2 \
501
+ --hash=sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803 \
502
+ --hash=sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec \
503
+ --hash=sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482 \
504
+ --hash=sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634 \
505
+ --hash=sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38 \
506
+ --hash=sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b \
507
+ --hash=sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c \
508
+ --hash=sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf \
509
+ --hash=sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946 \
510
+ --hash=sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73 \
511
+ --hash=sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1
512
+ tqdm==4.67.1 \
513
+ --hash=sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2 \
514
+ --hash=sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2
515
+ traitlets==5.14.3 \
516
+ --hash=sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7 \
517
+ --hash=sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f
518
+ typing-extensions==4.12.2 \
519
+ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
520
+ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
521
+ tzdata==2024.2 \
522
+ --hash=sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc \
523
+ --hash=sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd
524
+ urllib3==2.2.3 \
525
+ --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \
526
+ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9
527
+ watchdog==6.0.0; platform_system != "Darwin" \
528
+ --hash=sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a \
529
+ --hash=sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2 \
530
+ --hash=sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f \
531
+ --hash=sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c \
532
+ --hash=sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c \
533
+ --hash=sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13 \
534
+ --hash=sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e \
535
+ --hash=sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379 \
536
+ --hash=sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282 \
537
+ --hash=sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f \
538
+ --hash=sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c \
539
+ --hash=sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680 \
540
+ --hash=sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26 \
541
+ --hash=sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2
542
+ wcwidth==0.2.13 \
543
+ --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \
544
+ --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5
545
+ win32-setctime==1.2.0; sys_platform == "win32" \
546
+ --hash=sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390 \
547
+ --hash=sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0
548
+
setup.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
# Minimal packaging shim: all it does is enable `pip install .` with
# automatic package discovery. Presumably the remaining project metadata
# lives in pyproject.toml (present in this repo) — TODO confirm.
from setuptools import setup, find_packages

setup(
    name="fabric-to-espanso",      # distribution name on install
    packages=find_packages(),      # auto-discover every package directory
    include_package_data=True,     # ship non-Python files declared for the package
)
src/__init__.py ADDED
File without changes
src/fabrics_processor/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from .database import initialize_qdrant_database
src/fabrics_processor/config.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Configuration management for fabric-to-espanso."""
2
+ # TODO: check if config.validate wel gebruikt wordt
3
+
4
+ from typing import Dict, Any, Optional
5
+ from dataclasses import dataclass, field
6
+ from urllib.parse import urlparse
7
+ from pathlib import Path
8
+ import logging
9
+
10
+ from parameters import (
11
+ FABRIC_PATTERNS_FOLDER,
12
+ YAML_OUTPUT_FOLDER,
13
+ DEFAULT_TRIGGER,
14
+ OBSIDIAN_OUTPUT_FOLDER,
15
+ OBSIDIAN_INPUT_FOLDER,
16
+ BASE_WORDS,
17
+ QDRANT_URL,
18
+ USE_FASTEMBED,
19
+ EMBED_MODEL,
20
+ COLLECTION_NAME,
21
+ REQUIRED_FIELDS,
22
+ REQUIRED_FIELDS_DEFAULTS
23
+ )
24
+
25
+ logger = logging.getLogger('fabric_to_espanso')
26
+
27
@dataclass
class DatabaseConfig:
    """Connection and retry settings for the Qdrant database.

    Attributes mirror the values imported from ``parameters`` and may be
    overridden per-instance before calling :meth:`validate`.
    """
    url: str = QDRANT_URL
    max_retries: int = 3
    retry_delay: float = 1.0
    timeout: float = 10.0
    api_key: Optional[str] = None
    # Mutable defaults must go through default_factory to avoid sharing.
    required_fields: list = field(default_factory=lambda: REQUIRED_FIELDS)
    required_fields_defaults: dict = field(default_factory=lambda: REQUIRED_FIELDS_DEFAULTS)

    def validate(self) -> None:
        """Check URL shape and numeric bounds.

        Raises:
            ConfigurationError: If any configuration value is invalid.
        """
        from .exceptions import ConfigurationError

        try:
            parsed = urlparse(self.url)
            # A usable URL needs both a scheme and a network location.
            if not (parsed.scheme and parsed.netloc):
                raise ValueError(f"Invalid database URL: {self.url}")
            if self.max_retries < 0:
                raise ValueError(f"max_retries must be >= 0, got {self.max_retries}")
            if self.retry_delay <= 0:
                raise ValueError(f"retry_delay must be > 0, got {self.retry_delay}")
            if self.timeout <= 0:
                raise ValueError(f"timeout must be > 0, got {self.timeout}")
        except ValueError as err:
            # Re-raise any validation failure under the package's own error type.
            raise ConfigurationError(str(err))
62
+
63
@dataclass
class EmbeddingConfig:
    """Settings for the embedding model and target collection."""
    use_fastembed: bool = USE_FASTEMBED
    model_name: str = EMBED_MODEL
    collection_name: str = COLLECTION_NAME
    vector_size: int = 384

    def validate(self) -> None:
        """Raise ConfigurationError for an empty model name or non-positive vector size."""
        from .exceptions import ConfigurationError

        if not self.model_name:
            raise ConfigurationError("Embedding model name cannot be empty")
        if self.vector_size <= 0:
            raise ConfigurationError(f"Vector size must be > 0, got {self.vector_size}")
80
+
81
class Config:
    """Process-wide configuration singleton.

    The first instantiation builds the sub-configs and path settings from
    ``parameters``; every later call returns the same object.
    """
    _instance: Optional['Config'] = None

    def __new__(cls) -> 'Config':
        if cls._instance is None:
            instance = super().__new__(cls)
            instance.database = DatabaseConfig()
            instance.embedding = EmbeddingConfig()
            instance.espanso_trigger = DEFAULT_TRIGGER
            instance.fabric_patterns_folder = FABRIC_PATTERNS_FOLDER
            instance.yaml_output_folder = YAML_OUTPUT_FOLDER
            instance.obsidian_output_folder = OBSIDIAN_OUTPUT_FOLDER
            instance.obsidian_input_folder = OBSIDIAN_INPUT_FOLDER
            instance.base_words = BASE_WORDS
            cls._instance = instance
        return cls._instance

    def validate(self) -> None:
        """Validate sub-configs, required settings, and folder paths.

        Raises:
            ConfigurationError: On the first empty setting or invalid directory.
        """
        from .exceptions import ConfigurationError

        self.database.validate()
        self.embedding.validate()

        # Each required setting paired with its error message; checked in order
        # so the first empty value wins, matching the original behavior.
        required = (
            (self.espanso_trigger,
             "The default trigger for espanso patterns cannot be empty"),
            (self.fabric_patterns_folder,
             "The fabric patterns folder path cannot be empty"),
            (self.yaml_output_folder,
             "YAML output folder path for espanso cannot be empty"),
            (self.obsidian_output_folder,
             "Obsidian output folder path to write the files for Obsidian Textgenerator cannot be empty"),
            (self.obsidian_input_folder,
             "Obsidian input folder path to find the personal prompts stored in Obsidian cannot be empty"),
        )
        for value, message in required:
            if not value:
                raise ConfigurationError(message)

        # "cloud_dummy" is a sentinel for cloud deployments without local folders.
        for path in (self.fabric_patterns_folder, self.yaml_output_folder,
                     self.obsidian_output_folder, self.obsidian_input_folder):
            if path != "cloud_dummy" and not Path(path).is_dir():
                raise ConfigurationError(f"{path} is not a valid directory")
128
+ # Global configuration instance
129
+ config = Config()
src/fabrics_processor/database.py ADDED
@@ -0,0 +1,218 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Database management for fabric-to-espanso."""
2
+ from typing import Optional, List, Dict
3
+ import logging
4
+ import time
5
+
6
+ from qdrant_client import QdrantClient
7
+ from qdrant_client.http import models, exceptions
8
+ from qdrant_client.http.models import Distance, VectorParams, PointStruct
9
+
10
+ from .config import config
11
+ from .exceptions import DatabaseConnectionError, CollectionError, DatabaseInitializationError, ConfigurationError
12
+
13
+ logger = logging.getLogger('fabric_to_espanso')
14
+
15
def create_database_connection(url: Optional[str] = None, api_key: Optional[str] = None) -> QdrantClient:
    """Create a Qdrant client, retrying with a fixed delay on failure.

    Args:
        url: Optional database URL. If not provided, uses configuration.
        api_key: Optional API key passed through to the client.

    Returns:
        QdrantClient: Connected database client (connection is probed).

    Raises:
        DatabaseConnectionError: If connection fails after all retries.
    """
    target = url or config.database.url
    total_attempts = config.database.max_retries + 1

    for attempt_number in range(1, total_attempts + 1):
        try:
            client = QdrantClient(
                url=target,
                timeout=config.database.timeout,
                api_key=api_key,
            )
            # Probe the connection before handing the client out.
            client.get_collections()
            return client
        except Exception as exc:
            if attempt_number == total_attempts:
                raise DatabaseConnectionError(
                    f"Failed to connect to database at {target} after "
                    f"{config.database.max_retries} attempts: {str(exc)}"
                ) from exc
            logger.warning(
                f"Connection attempt {attempt_number} failed, retrying in "
                f"{config.database.retry_delay} seconds..."
            )
            time.sleep(config.database.retry_delay)
49
+
50
def initialize_qdrant_database(
    url: Optional[str] = None,
    api_key: Optional[str] = "",
    collection_name: Optional[str] = None,
    use_fastembed: Optional[bool] = None,
    embed_model: Optional[str] = None
) -> QdrantClient:
    """Initialize the Qdrant database for storing markdown file information.

    FIX: defaults were previously bound from ``config`` at import time, so
    configuration changes made after import were silently ignored. ``None``
    now acts as a sentinel and defaults are resolved at call time; callers
    passing explicit values are unaffected.

    Args:
        url: Database URL; defaults to config.database.url.
        api_key: API key for the database, if any.
        collection_name: Name of the collection to initialize.
        use_fastembed: Whether to use FastEmbed for embeddings.
        embed_model: Name of the embedding model to use.

    Returns:
        QdrantClient: Initialized database client.

    Raises:
        DatabaseInitializationError: If initialization fails.
        CollectionError: If collection creation fails.
        ConfigurationError: If configuration is invalid.
    """
    # Resolve call-time defaults from the live configuration object.
    if url is None:
        url = config.database.url
    if collection_name is None:
        collection_name = config.embedding.collection_name
    if use_fastembed is None:
        use_fastembed = config.embedding.use_fastembed
    if embed_model is None:
        embed_model = config.embedding.model_name

    try:
        # Validate configuration before touching the network.
        config.validate()

        # Create database connection (retries handled inside).
        client = create_database_connection(url=url, api_key=api_key)

        # Check if collection exists.
        collections = client.get_collections()
        collection_names = [c.name for c in collections.collections]

        if collection_name not in collection_names:
            logger.info(f"Creating new collection: {collection_name}")

            # Create collection with appropriate vector configuration.
            if use_fastembed:
                vector_config = client.get_fastembed_vector_params()
            else:
                vector_config = {
                    embed_model: VectorParams(
                        size=config.embedding.vector_size,
                        distance=Distance.COSINE
                    )
                }

            try:
                client.create_collection(
                    collection_name=collection_name,
                    vectors_config=vector_config,
                    on_disk_payload=True
                )
            except exceptions.UnexpectedResponse as e:
                raise CollectionError(
                    f"Failed to create collection {collection_name}: {str(e)}"
                ) from e

            # Create payload indexes for efficient filtered searching.
            for field_name, field_type in [
                ("filename", models.PayloadSchemaType.KEYWORD),
                ("date", models.PayloadSchemaType.DATETIME)
            ]:
                client.create_payload_index(
                    collection_name=collection_name,
                    field_name=field_name,
                    field_schema=field_type
                )
            logger.info(f"Created indexes for collection {collection_name}")

        # Log collection status.
        collection_info = client.get_collection(collection_name)
        logger.info(
            f"Collection {collection_name} ready with "
            f"{collection_info.points_count} points"
        )

        return client

    except Exception as e:
        logger.error(f"Database initialization failed: {str(e)}", exc_info=True)
        # Preserve the specific error types callers catch; wrap anything else.
        if isinstance(e, (DatabaseConnectionError, CollectionError)):
            raise
        raise DatabaseInitializationError(str(e)) from e
134
+
135
def validate_database_payload(
    client: QdrantClient,
    collection_name: str,
) -> Dict:
    """Validate (and repair in place) the payload of every point in the collection.

    Scrolls through the collection in small batches, runs each payload through
    ``validate_point_payload`` and writes any repaired payload back via upsert.
    Points with unfixable payloads are logged and skipped.

    Args:
        client: Initialized Qdrant client.
        collection_name: Name of the collection to validate.
    """
    logger.info("Validating existing database points...")
    next_page = None

    while True:
        # Batches of 5 keep memory use small during the scan.
        batch, next_page = client.scroll(
            collection_name=collection_name,
            limit=5,
            offset=next_page,
        )

        for record in batch:
            try:
                repaired = validate_point_payload(record.payload, record.id)
                if repaired != record.payload:
                    # Write the repaired payload back under the same id/vector.
                    client.upsert(
                        collection_name=collection_name,
                        points=[PointStruct(
                            id=record.id,
                            vector=record.vector,
                            payload=repaired,
                        )],
                    )
                    logger.info(f"Fixed and updated point {record.id} in database")
            except ConfigurationError as err:
                # Unfixable payload: report and continue with the next point.
                logger.error(str(err))

        if not next_page:  # No more points to process
            break

    logger.info("Database validation completed")
178
+
179
def validate_point_payload(payload: dict, point_id: Optional[str] = None) -> dict:
    """Validate and fix point payload fields.
    Only use if somehow many points have become corrupted.

    FIX: the fallback values previously read ``self.required_fields_defaults``,
    but this is a module-level function — every missing 'filesize'/'trigger'
    raised NameError. The defaults now come from ``config.database``.

    Args:
        payload (dict): Point payload to validate
        point_id (str, optional): ID of the point for logging purposes

    Returns:
        dict: Validated and potentially fixed payload (input is not mutated)

    Raises:
        ConfigurationError: If required fields are missing and cannot be fixed
    """
    # Replaced stray debug print with a proper debug log record.
    logger.debug(f"Validating point {point_id if point_id else ''}")
    from .exceptions import ConfigurationError

    # Critical fields have no sensible default and must be present.
    if 'filename' not in payload or 'content' not in payload:
        error_msg = f"Point {point_id if point_id else ''} is missing critical fields: "
        error_msg += "'filename' and/or 'content' are required and cannot be defaulted"
        raise ConfigurationError(error_msg)

    # Copy payload to avoid modifying the original.
    fixed_payload = payload.copy()

    # Defaults for fixable, non-critical fields live on the database config.
    defaults = config.database.required_fields_defaults

    if 'purpose' not in fixed_payload or not fixed_payload['purpose']:
        # A missing purpose falls back to the full content text.
        fixed_payload['purpose'] = fixed_payload['content']
        logger.warning(f"Point {point_id if point_id else ''}: 'purpose' was missing, set to content value")

    if 'filesize' not in fixed_payload:
        fixed_payload['filesize'] = defaults['filesize']
        logger.warning(f"Point {point_id if point_id else ''}: 'filesize' was missing, set to {defaults['filesize']}")

    if 'trigger' not in fixed_payload:
        fixed_payload['trigger'] = defaults['trigger']
        logger.warning(f"Point {point_id if point_id else ''}: 'trigger' was missing, set to {defaults['trigger']}")

    return fixed_payload
src/fabrics_processor/database_updater.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+ from qdrant_client import QdrantClient
3
+ from qdrant_client.http.models import PointStruct, Filter, FieldCondition, MatchValue, PointIdsList
4
+ from fastembed import TextEmbedding
5
+ import logging
6
+ import uuid
7
+ from .output_files_generator import generate_yaml_file, generate_markdown_files
8
+ from .config import config
9
+ from .exceptions import ConfigurationError
10
+ from .database import validate_point_payload
11
+
12
+ logger = logging.getLogger('fabric_to_espanso')
13
+
14
def get_embedding(text: str, embedding_model: TextEmbedding) -> list:
    """Embed a single text with FastEmbed and return the vector as a plain list.

    Args:
        text (str): Text to generate an embedding for.
        embedding_model (TextEmbedding): Initialized FastEmbed model.

    Returns:
        list: Embedding vector for *text*.
    """
    # embed() yields one vector per input; we pass one text and take the first.
    first_vector = next(iter(embedding_model.embed([text])))
    return first_vector.tolist()
26
+
27
def update_qdrant_database(client: QdrantClient, collection_name: str, new_files: list, modified_files: list, deleted_files: list):
    """Update the Qdrant database based on detected file changes.

    After the point changes, the espanso YAML file and the Obsidian markdown
    files are regenerated so both outputs stay in sync with the database.

    FIXES:
    - The non-fastembed branch read ``config.model_name`` (AttributeError);
      the model name lives on ``config.embedding.model_name``.
    - The delete-branch log messages omitted the filename; restored.

    Args:
        client (QdrantClient): An initialized Qdrant client.
        collection_name (str): Name of the collection to update.
        new_files (list): New files to be added to the database.
        modified_files (list): Modified files to be updated in the database.
        deleted_files (list): Filenames of deleted files to remove.
    """

    # Initialize the embedding model once; it is reused for every file.
    if config.embedding.use_fastembed:
        # TODO: it should be possible to choose another model here. Make that an option.
        logger.info(f"Initializing FastEmbed model.")
        embedding_model = TextEmbedding()
    else:
        logger.info(f"Initializing embbeding model: {config.embedding.model_name}")
        # TODO: untested — not sure this path works.
        embedding_model = TextEmbedding(model_name=config.embedding.model_name)

    try:
        # Add new files.
        for file in new_files:
            try:
                payload_new = validate_point_payload(file)
                point = PointStruct(
                    id=str(uuid.uuid4()),  # Generate a new UUID for each point
                    # NOTE: 'fast-bge-small-en' is the named vector used by fastembed;
                    # the name can be found via client.get_vector_field_name().
                    vector={'fast-bge-small-en':
                            get_embedding(payload_new['purpose'], embedding_model)},  # Vector from purpose field
                    payload={
                        "filename": payload_new['filename'],
                        "content": payload_new['content'],
                        "purpose": payload_new['purpose'],
                        "date": payload_new['last_modified'],
                        "filesize": payload_new['filesize'],
                        "trigger": payload_new['trigger'],
                    }
                )
                client.upsert(collection_name=collection_name, points=[point])
                logger.info(f"Added new file to database: {file['filename']}")
            except ConfigurationError as e:
                logger.error(f"Skipping new file: {str(e)}")

        # Update modified files.
        for file in modified_files:
            try:
                # Find the stored point matching this filename.
                scroll_result = client.scroll(
                    collection_name=collection_name,
                    scroll_filter=Filter(
                        must=[FieldCondition(key="filename", match=MatchValue(value=file['filename']))]
                    ),
                    limit=1
                )[0]
                # TODO: Add handling of cases of multiple entries with the same filename
                if scroll_result:
                    point_id = scroll_result[0].id
                    payload_current = validate_point_payload(file, point_id)
                    # NOTE: with fastembed the named vector 'fast-bge-small-en' is required.
                    # Using the raw Qdrant API directly, an unnamed vector would also work:
                    # vector=get_embedding(file['purpose'], embedding_model).
                    # See https://github.com/qdrant/qdrant-client/discussions/598
                    # The name fastembed uses depends on the chosen model;
                    # it can be found via client.get_vector_field_name().
                    point = PointStruct(
                        id=point_id,
                        vector={'fast-bge-small-en':
                                get_embedding(file['purpose'], embedding_model)},  # Vector from purpose field
                        payload={
                            "filename": payload_current['filename'],
                            "content": file['content'],
                            "purpose": file['purpose'],
                            "date": file['last_modified'],
                            "filesize": file['filesize'],
                            "trigger": payload_current['trigger'],
                        }
                    )
                    client.upsert(collection_name=collection_name, points=[point])
                    logger.info(f"Updated modified file in database: {payload_current['filename']}")
                else:
                    logger.warning(f"File not found in database for update: {file['filename']}")
            except ConfigurationError as e:
                logger.error(f"Skipping modified file: {str(e)}")

        # Delete removed files.
        for filename in deleted_files:
            # Find the stored point matching this filename.
            scroll_result = client.scroll(
                collection_name=collection_name,
                scroll_filter=Filter(
                    must=[FieldCondition(key="filename", match=MatchValue(value=filename))]
                ),
                limit=1
            )[0]
            # TODO: Add handling of cases of multiple entries with the same filename
            if scroll_result:
                point_id = scroll_result[0].id
                client.delete(
                    collection_name=collection_name,
                    points_selector=PointIdsList(points=[point_id])
                )
                logger.info(f"Deleted file from database: {filename}")
            else:
                logger.warning(f"File not found in database for deletion: {filename}")

        logger.info("Database update completed successfully")

        # Generate new YAML file for use with espanso after database update.
        print("Generating YAML file...")
        generate_yaml_file(client, config.embedding.collection_name, config.yaml_output_folder)
        # Generate markdown files for use with obsidian after database update.
        print("Generating markdown files...")
        generate_markdown_files(client, config.embedding.collection_name, config.obsidian_output_folder)

    except Exception as e:
        logger.error(f"Error updating Qdrant database: {str(e)}", exc_info=True)
        raise
+ raise
src/fabrics_processor/exceptions.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Custom exceptions for the fabric-to-espanso application."""
2
+
3
# All application errors derive from FabricToEspansoError, so callers can
# catch the whole family with a single except clause.
class FabricToEspansoError(Exception):
    """Base exception for all fabric-to-espanso errors."""
    pass

class DatabaseError(FabricToEspansoError):
    """Base exception for database-related errors."""
    pass

class DatabaseConnectionError(DatabaseError):
    """Raised when unable to connect to the database."""
    pass

class DatabaseInitializationError(DatabaseError):
    """Raised when database initialization fails."""
    pass

class CollectionError(DatabaseError):
    """Raised when there's an error with collection operations."""
    pass

class ConfigurationError(FabricToEspansoError):
    """Raised when there's an error in the configuration."""
    pass

# NOTE(review): this shadows Python's builtin NotImplementedError inside this
# module. Renaming would break existing importers, so it is only flagged here;
# prefer a distinct name (e.g. FeatureNotImplementedError) in new code.
class NotImplementedError(FabricToEspansoError):
    """Raised when a feature is not implemented."""
    pass

class ParsingError(FabricToEspansoError):
    """Raised when there's an error parsing markdown files."""
    pass

class ProcessingError(FabricToEspansoError):
    """Raised when there's an error processing the input files."""
    pass
src/fabrics_processor/file_change_detector.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """File change detection for fabric-to-espanso."""
2
+ from typing import List, Tuple, Dict, Any
3
+ from datetime import datetime
4
+ import logging
5
+
6
+ from qdrant_client import QdrantClient
7
+ from qdrant_client.http.models import Filter, FieldCondition, MatchValue
8
+ from qdrant_client.http.exceptions import UnexpectedResponse
9
+
10
+ from .file_processor import process_markdown_files
11
+ from .config import config
12
+ from .exceptions import DatabaseError
13
+
14
+ logger = logging.getLogger('fabric_to_espanso')
15
+
16
def get_stored_files(client: QdrantClient, collection_name: str = config.embedding.collection_name) -> Dict[str, Dict[str, Any]]:
    """Return every record stored in the collection, keyed by filename.

    Args:
        client: Initialized Qdrant client.
        collection_name: Name of the collection to query.

    Returns:
        Dict mapping filenames to their database records
        (each record holds 'payload', 'id' and 'vector').

    Raises:
        DatabaseError: If the query fails.
    """
    try:
        # Single scroll with a generous limit; adjust for larger pattern sets.
        records = client.scroll(
            collection_name=collection_name,
            limit=10000
        )[0]
        return {
            record.payload['filename']: {
                'payload': record.payload,
                'id': record.id,
                'vector': record.vector,
            }
            for record in records
        }
    except UnexpectedResponse as e:
        raise DatabaseError(f"Failed to query stored files: {str(e)}") from e
44
+
45
def compare_file_dates(
    current_date: datetime,
    stored_date_str: str
) -> bool:
    """Report whether the file on disk is newer than the stored record.
    note: This function isn't used anymore. Replaced by compare_file_sizes.

    Args:
        current_date: Current file's modification date.
        stored_date_str: Stored modification date, ISO-like
            ('%Y-%m-%dT%H:%M:%S.%f', microseconds required).

    Returns:
        True if the file is modified (strictly newer), False otherwise.
    """
    parsed = datetime.strptime(stored_date_str, '%Y-%m-%dT%H:%M:%S.%f')
    return parsed < current_date
61
+
62
def detect_file_changes(
    client: QdrantClient,
    fabric_patterns_folder: str
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[str]]:
    """Detect changes in markdown files by comparing with the database.

    FIX: the per-file debug messages previously logged a literal placeholder
    instead of the filename; restored. Docstring arg name corrected to match
    the signature.

    Args:
        client: Initialized Qdrant client.
        fabric_patterns_folder: Folder containing the markdown pattern files.

    Returns:
        Tuple containing:
            - List of new files
            - List of modified files
            - List of deleted filenames

    Raises:
        DatabaseError: If a database query fails.
        OSError: If file system operations fail.
    """
    try:
        # Get current files from disk.
        current_files = process_markdown_files(fabric_patterns_folder)
        logger.debug(f"Found {len(current_files)} files in {fabric_patterns_folder}")

        # Get stored files from the database.
        stored_files = get_stored_files(client)
        logger.debug(f"Found {len(stored_files)} files in database")

        new_files: List[Dict[str, Any]] = []
        modified_files: List[Dict[str, Any]] = []
        deleted_files: List[str] = []

        # Check for new and modified files.
        for file in current_files:
            filename = file['filename']
            if filename not in stored_files:
                logger.debug(f"New file detected: {filename}")
                new_files.append(file)
            # Compare on file size, not modified date, because `fabric -U`
            # changes the file's mtime even when the content is unchanged.
            elif file['filesize'] != stored_files[filename]['payload']['filesize']:
                logger.debug(f"Modified file detected: {filename}")
                modified_files.append(file)

        # Anything stored but no longer on disk has been deleted.
        current_filenames = {file['filename'] for file in current_files}
        deleted_files = [
            filename for filename in stored_files
            if filename not in current_filenames
        ]

        if deleted_files:
            logger.debug(f"Deleted files detected: {deleted_files}")

        logger.info(
            f"Changes detected:"
            f" {len(new_files)} new,"
            f" {len(modified_files)} modified,"
            f" {len(deleted_files)} deleted"
        )

        return new_files, modified_files, deleted_files

    except Exception as e:
        logger.error(f"Error detecting file changes: {str(e)}", exc_info=True)
        # Preserve the documented exception types; wrap anything unexpected.
        if isinstance(e, (DatabaseError, OSError)):
            raise
        raise RuntimeError(f"Unexpected error detecting changes: {str(e)}") from e
src/fabrics_processor/file_processor.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """File processing module for fabric-to-espanso."""
2
+ from pathlib import Path
3
+ from typing import List, Dict, Any, Optional
4
+ from datetime import datetime
5
+ import logging
6
+
7
+ from .markdown_parser import parse_markdown_file
8
+ from .exceptions import ProcessingError
9
+
10
+ logger = logging.getLogger('fabric_to_espanso')
11
+
12
def find_markdown_files(
    root_dir: Path,
    max_depth: int = 2,
    pattern: str = "*.md"
) -> List[Path]:
    """Find markdown files in directory up to specified depth.

    Args:
        root_dir: Root directory to search in
        max_depth: Maximum directory depth to search
        pattern: Glob pattern for files to find

    Returns:
        List of paths to markdown files

    Raises:
        ValueError: If root_dir doesn't exist or isn't a directory
        ProcessingError: If the search itself fails unexpectedly
    """
    if not root_dir.exists():
        raise ValueError(f"Directory does not exist: {root_dir}")
    if not root_dir.is_dir():
        raise ValueError(f"Path is not a directory: {root_dir}")

    try:
        # Depth is measured in path components below root_dir.
        base_depth = len(root_dir.parts)
        # Names that are never patterns: docs and the per-pattern user file.
        excluded_names = {"readme.md", "user.md"}

        matches: List[Path] = [
            candidate
            for candidate in root_dir.rglob(pattern)
            if len(candidate.parts) - base_depth <= max_depth
            and candidate.name.lower() not in excluded_names
            and candidate.is_file()
        ]

        logger.debug(f"Found {len(matches)} markdown files in {root_dir}")
        return matches

    except Exception as e:
        logger.error(f"Error finding markdown files: {str(e)}", exc_info=True)
        raise ProcessingError(f"Failed to find markdown files: {str(e)}") from e
60
+
61
def process_markdown_file(
    file_path: Path,
    trigger_prefix: str
) -> Optional[Dict[str, Any]]:
    """Process a single markdown file.

    Args:
        file_path: Path to markdown file
        trigger_prefix: Prefix for espanso triggers

    Returns:
        Dictionary with file information or None if processing fails

    Raises:
        ProcessingError: If file processing fails
    """
    try:
        content, extracted_sections = parse_markdown_file(str(file_path))
        if extracted_sections is None:
            # Fall back to the whole file when no keyword section was found.
            logger.warning(f"No sections extracted from {file_path}")
            extracted_sections = content

        stat_info = file_path.stat()
        return {
            # The pattern is identified by its directory, not the file name.
            'filename': file_path.parent.name,
            'content': content,
            'purpose': extracted_sections,
            'last_modified': datetime.fromtimestamp(stat_info.st_mtime),
            'filesize': stat_info.st_size,
            'trigger': trigger_prefix,
            'label': file_path.stem,  # filename without extension
        }

    except Exception as e:
        logger.error(f"Error processing {file_path}: {str(e)}", exc_info=True)
        raise ProcessingError(f"Failed to process {file_path}: {str(e)}") from e
96
+
97
def process_markdown_files(
    markdown_folder: Path | str,
    # TODO: make 'max_depth' a parameter
    max_depth: int = 2,
    trigger_prefix: str = ";;fab"
) -> List[Dict[str, Any]]:
    """Process all markdown files in directory.

    Args:
        markdown_folder: Directory containing markdown files
        max_depth: Maximum directory depth to search
        trigger_prefix: Prefix for espanso triggers

    Returns:
        List of processed file information

    Raises:
        ProcessingError: If processing fails
        ValueError: If markdown_folder is invalid
    """
    root_dir = Path(markdown_folder)
    processed: List[Dict[str, Any]] = []

    try:
        for file_path in find_markdown_files(root_dir, max_depth):
            try:
                parsed = process_markdown_file(file_path, trigger_prefix)
            except ProcessingError as e:
                # One bad file must not abort the whole run.
                logger.error(str(e))
                continue
            if parsed:
                processed.append(parsed)
                logger.info(f"Processed: {file_path.parent.name}")

        logger.info(f"Successfully processed {len(processed)} files in fabric patterns folder")
        return processed

    except Exception as e:
        logger.error(f"Error processing markdown files: {str(e)}", exc_info=True)
        if isinstance(e, (ProcessingError, ValueError)):
            raise
        raise ProcessingError(f"Unexpected error processing files: {str(e)}") from e
src/fabrics_processor/logger.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from pathlib import Path
4
+ from logging.handlers import RotatingFileHandler
5
+
6
# Module level logger instance (singleton; see setup_logger)
_logger = None

def setup_logger(log_file='fabric_to_espanso.log'):
    """
    Set up and configure the logger for the application.

    Idempotent: repeated calls return the already-configured logger.

    Args:
        log_file (str): Name of the log file. Defaults to 'fabric_to_espanso.log'.

    Returns:
        logging.Logger: Configured logger object.
    """
    global _logger
    if _logger is not None:
        return _logger

    logger = logging.getLogger('fabric_to_espanso')

    # Drop any handlers left over from earlier configuration.
    logger.handlers.clear()

    logger.setLevel(logging.INFO)
    # Keep messages out of the root logger to avoid duplicate output.
    logger.propagate = False

    # Logs live in <project root>/logs; project root is 2 levels above this module.
    log_dir = Path(__file__).parent.parent.parent / "logs"
    log_dir.mkdir(exist_ok=True)

    # Rotate at 1 MiB, keeping 5 backup files.
    file_handler = RotatingFileHandler(log_dir / log_file, maxBytes=1024 * 1024, backupCount=5)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    )

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(
        logging.Formatter('%(name)s - %(levelname)s - %(message)s')
    )

    logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    _logger = logger
    return logger
src/fabrics_processor/markdown_parser.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Markdown parsing module for fabric-to-espanso."""
2
+ from typing import Tuple, List, Optional, Set
3
+ from pathlib import Path
4
+ import regex
5
+ import logging
6
+
7
+ from .exceptions import ParsingError
8
+ from .config import config
9
+
10
+ logger = logging.getLogger('fabric_to_espanso')
11
+
12
def create_section_pattern(keywords: Set[str]) -> regex.Pattern:
    """Build a case-insensitive pattern that matches whole markdown sections.

    A section is a level-1 heading (``# ...``) whose text contains any of
    *keywords*, plus every following line up to (but not including) the next
    line that starts with ``#``.
    """
    # Escape each keyword so literal regex metacharacters cannot alter the pattern.
    keyword_pattern = '|'.join(regex.escape(kw) for kw in keywords)
    return regex.compile(
        # heading line with a keyword, then any lines not starting a new heading
        rf'^#\s+.*(?:{keyword_pattern}).*$\n?(?:(?!^#).*\n?)*',
        regex.MULTILINE | regex.IGNORECASE
    )
18
+
19
def parse_markdown_file(
    file_path: str | Path,
    keywords: Optional[Set[str]] = None
) -> Tuple[str, Optional[str]]:
    """Extract sections with specified keywords from markdown file.

    Args:
        file_path: Path to markdown file
        keywords: Set of keywords to match in headings. If None, uses defaults from config

    Returns:
        Tuple of (full_content, extracted_sections).
        If no sections match, returns (full_content, None).

    Raises:
        ParsingError: If file reading or parsing fails
    """
    try:
        # Empty/None keyword sets fall back to the configured defaults.
        active_keywords = keywords or set(config.base_words)
        pattern = create_section_pattern(active_keywords)

        path = Path(file_path)
        try:
            content = path.read_text(encoding='utf-8')
        except Exception as e:
            raise ParsingError(f"Failed to read {path}: {str(e)}") from e

        sections = pattern.findall(content)

        if not sections:
            logger.debug(f"No matching sections found in {path.name}")
            return content, None

        # Matched sections are joined with a blank line between them.
        extracted = '\n\n'.join(sections)
        logger.debug(f"Extracted {len(sections)} sections from {path.name}")
        return content, extracted

    except Exception as e:
        logger.error(f"Error parsing {file_path}: {str(e)}", exc_info=True)
        if isinstance(e, ParsingError):
            raise
        raise ParsingError(f"Unexpected error parsing {file_path}: {str(e)}") from e
69
+
70
def main():
    """Example usage: parse a sample document and print the result."""
    try:
        # A custom keyword set may be passed as a second argument, e.g.
        # parse_markdown_file('document.md', {'Identity', 'Purpose', 'Scope'})
        print(parse_markdown_file('document.md'))
    except Exception as e:
        print(f"An error occurred: {e}")

if __name__ == '__main__':
    main()
src/fabrics_processor/obsidian2fabric.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+ from shutil import copy2, rmtree
3
+ from fastcore.utils import L
4
+ import re
5
+
6
+ from src.fabrics_processor.config import config
7
+
8
def sentence2snake(name: str) -> str:
    """Convert any string to snake_case, replacing non-alphanumeric with underscore"""
    # Lowercase, map every non-word char to '_', then collapse runs of '_'.
    underscored = re.sub(r'\W', '_', name.lower())
    return re.sub(r'_+', '_', underscored)
13
+
14
def round_timestamp(ts: float) -> int:
    """Round timestamp to handle filesystem differences.

    Drops the fractional part and the last four digits of the integer
    seconds (i.e. floor-divides by 10,000), so timestamps that differ only
    by sub-10,000-second filesystem quirks compare equal.

    Fix: the original string-slicing version (`str(ts).split('.')[0][:-4]`)
    raised ValueError for timestamps below 10,000 seconds; integer division
    is equivalent for normal epoch timestamps and safe at the boundary.
    """
    return int(ts) // 10_000
17
+
18
def get_md_files_obsidian(path: Path) -> dict:
    """Get files from obsidian vault: stem -> (path, timestamp, size)"""
    vault_files = {}
    for md_file in Path(path).glob('**/*.md'):
        # Snake-case the stem and append the parent folder name so the user's
        # own prompts stay distinguishable from others with the same name.
        key = sentence2snake(md_file.stem) + "-" + md_file.parent.name.lower()
        info = md_file.stat()
        vault_files[key] = (md_file, info.st_mtime, info.st_size)
    return vault_files
23
+
24
def get_md_files_fabricsfolder(path: Path) -> dict:
    """Get files from target structure: dir_name -> (system.md_path, timestamp, size)"""
    target_files = {}
    for entry in path.iterdir():
        if not entry.is_dir():
            continue
        system_md = entry / 'system.md'
        # Only directories that actually contain a system.md count as patterns.
        if system_md.exists():
            info = system_md.stat()
            target_files[entry.stem] = (system_md, info.st_mtime, info.st_size)
    return target_files
30
+
31
def get_modified_files(source_files: dict, target_files: dict) -> list:
    """Compare source and target files, returning entries needing updates.

    Each returned element is a dict {filename: (path, timestamp, size)}.

    Comparison is on file size only. Timestamps were dropped deliberately:
    filesystem and daylight-saving differences cause false positives, while
    genuinely edited files may carry near-identical timestamps.
    """
    shared = L(name for name in source_files.keys() if name in target_files)
    return L({name: source_files[name]} for name in shared
             if source_files[name][2] != target_files[name][2])
45
+
46
def get_new_files(source_files: dict, target_files: dict) -> list:
    """Return source entries absent from the target, each as a dict
    {filename: (path, timestamp, size)}."""
    return L({name: source_files[name]}
             for name in source_files
             if name not in target_files)
50
+
51
def process_file(source: dict, target_dir: Path) -> None:
    """
    Process a single file: create directory, copy as system.md, create user.md

    Args:
        source: Dict of source file: filename:(path, timestamp, size)
        target_dir: Base target directory (e.g. 'md_target')
    """
    # The dict holds exactly one entry: its key is the pattern name.
    name, info = next(iter(source.items()))
    source_path = info[0]

    destination = target_dir / name
    destination.mkdir(mode=0o755, exist_ok=True)
    copy2(source_path, destination / 'system.md')
    # fabric expects an (optionally empty) user.md next to system.md.
    (destination / 'user.md').touch()
65
+
66
def sync_folders(source_dir: Path, target_dir: Path) -> None:
    """
    Main function to synchronize folders

    Args:
        source_dir: Path to source directory (obsidian vault)
        target_dir: Path to target directory (fabrics folder)
    """
    vault_files = get_md_files_obsidian(Path(source_dir))
    fabric_files = get_md_files_fabricsfolder(Path(target_dir))

    # Both helpers yield lists of {name: (path, timestamp, size)} entries.
    pending = L(get_new_files(vault_files, fabric_files) +
                get_modified_files(vault_files, fabric_files))
    for entry in pending:
        process_file(entry, target_dir)

    # Only delete directories that originated from the vault; vault-derived
    # names always contain a '-' (stem-parent separator).
    stale = L(name for name in fabric_files.keys()
              if name not in vault_files and "-" in name)
    for name in stale:
        rmtree(target_dir / name)
src/fabrics_processor/output_files_generator.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """YAML file generation for fabric-to-espanso and
2
+ markdown file generation for Obsidian TextGenerator plugin."""
3
+ from pathlib import Path
4
+ from shutil import rmtree
5
+ from typing import Dict, Any, List
6
+ import yaml
7
+ import logging
8
+
9
+ from qdrant_client import QdrantClient
10
+ from qdrant_client.http.exceptions import UnexpectedResponse
11
+ from src.fabrics_processor.config import config
12
+
13
+ from .exceptions import DatabaseError
14
+
15
+ logger = logging.getLogger('fabric_to_espanso')
16
+
17
# Marker type: values wrapped in BlockString are serialized by the custom
# representer below using YAML literal block style ('|'), keeping multi-line
# prompt text readable in the generated espanso matches file.
class BlockString(str):
    """String subclass for YAML block-style string representation."""
    pass
20
+
21
def repr_block_string(dumper: yaml.Dumper, data: BlockString) -> yaml.ScalarNode:
    """Custom YAML representer for block strings.

    Emits the scalar with style='|' (literal block) so embedded newlines are
    preserved verbatim in the output YAML.
    """
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

# Register globally so yaml.dump uses the block style for every BlockString.
yaml.add_representer(BlockString, repr_block_string)
26
+
27
def generate_yaml_file(client: QdrantClient, collection_name: str, yaml_output_folder: str) -> None:
    """Generate a complete YAML file from the Qdrant database.

    Writes one espanso match per stored pattern to fabric_patterns.yml in the
    given folder; the pattern text becomes the 'replace' body with the
    clipboard appended at expansion time.

    Args:
        client: Initialized Qdrant client
        collection_name: Name of the collection to query
        yaml_output_folder: Directory where the YAML file will be created

    Raises:
        DatabaseError: If database query fails
        OSError: If file operations fail
        ValueError: If output folder is invalid
    """
    try:
        # Validate output folder
        output_path = Path(yaml_output_folder)
        if not output_path.exists():
            msg = ("YAML output path doesn't exist. Check the Espanso matches "
                   f"directory with `espanso path` in PowerShell: {output_path}")
            logger.info(msg)
            raise ValueError(msg)

        # Query all entries from the database
        try:
            results = client.scroll(
                collection_name=collection_name,
                limit=10000  # Adjust based on expected maximum files
            )[0]
        except UnexpectedResponse as e:
            raise DatabaseError(f"Failed to query database: {str(e)}") from e

        # Prepare YAML data: one match entry per stored pattern.
        data: Dict[str, List[Dict[str, Any]]] = {'matches': []}
        for result in results:
            data['matches'].append({
                'trigger': result.payload['trigger'],
                # BlockString forces literal block style; {{clipb}} is expanded
                # by espanso from the clipboard var declared below.
                'replace': BlockString(result.payload['content'] + '\n{{clipb}}'),
                'label': result.payload['filename'],
                'vars': [
                    {'name': 'clipb', 'type': 'clipboard'}
                ],
            })

        # Write the YAML file. Fix: pin UTF-8 (and keep non-ASCII readable)
        # instead of relying on the platform default encoding.
        yaml_output_path = output_path / "fabric_patterns.yml"
        with open(yaml_output_path, 'w', encoding='utf-8') as yaml_file:
            yaml.dump(data, yaml_file, sort_keys=False,
                      default_flow_style=False, allow_unicode=True)

        logger.info(f"YAML file generated successfully at {yaml_output_path}")
    except Exception as e:
        logger.error(f"Error generating YAML file: {str(e)}", exc_info=True)
        if isinstance(e, (DatabaseError, OSError, ValueError)):
            raise
        raise RuntimeError(f"Unexpected error generating YAML: {str(e)}") from e
80
+
81
def generate_markdown_files(client: QdrantClient, collection_name: str, markdown_output_folder: str) -> None:
    """Generate markdown files from the Qdrant database.

    Rebuilds the Obsidian TextGenerator folder from scratch: existing files
    are removed first, then one markdown file is written per stored pattern.

    Args:
        client: Initialized Qdrant client
        collection_name: Name of the collection to query
        markdown_output_folder: Directory where the markdown files will be created

    Raises:
        DatabaseError: If database query fails
        OSError: If file operations fail
        ValueError: If output folder is invalid
    """
    try:
        # Validate output folder
        output_path = Path(markdown_output_folder)
        if not output_path.exists():
            msg = ("Markdown output path doesn't exist. Check if this folder in "
                   "parameters.py matches the Textgenerator folder in your "
                   f"Obsidian vault: {output_path}")
            logger.info(msg)
            raise ValueError(msg)

        try:
            # Regenerate from scratch: clear the TextGenerator folder first.
            rmtree(output_path)
            output_path.mkdir(mode=0o755)

            results = client.scroll(
                collection_name=collection_name,
                limit=10000  # Adjust based on expected maximum files
            )[0]

            # Generate one markdown file per entry. Fix: the file name now
            # interpolates the pattern name (was a broken literal placeholder).
            for result in results:
                payload = result.payload
                filename = payload['filename']
                markdown_path = output_path / f"{filename}.md"
                with open(markdown_path, 'w', encoding='utf-8') as markdown_file:
                    markdown_file.write(
                        apply_markdown_template(filename, payload['purpose'], payload['content'])
                    )

            logger.info(f"Generated {len(results)} markdown files successfully at {markdown_output_folder}")

        except UnexpectedResponse as e:
            raise DatabaseError(f"Failed to query database: {str(e)}") from e

    except Exception as e:
        logger.error(f"Error generating Markdown files: {str(e)}", exc_info=True)
        # Fix: re-raise the documented error types instead of swallowing them
        # (previously even the ValueError above was silently logged away).
        if isinstance(e, (DatabaseError, OSError, ValueError)):
            raise
        raise RuntimeError(f"Unexpected error generating Markdown files: {str(e)}") from e
130
+
131
def apply_markdown_template(filename: str, purpose: str, content: str) -> str:
    """Apply the markdown template to the given content.

    Produces a markdown file with YAML front matter understood by the
    Obsidian TextGenerator plugin.

    Fix: promptId and name now interpolate *filename* (they previously
    contained a broken literal placeholder).

    Args:
        filename: Pattern name; used for promptId and name.
        purpose: Description text (may be multi-line).
        content: System prompt text (may be multi-line).

    Returns:
        The complete markdown document as a string.
    """
    # Re-indent continuation lines so multi-line values stay inside their
    # YAML block scalars.
    purpose_indented = purpose.replace('\n', '\n    ')
    content_indented = content.replace('\n', '\n    ')

    return f"""---
PromptInfo:
  promptId: {filename}
  name: {filename}
  description: |
    {purpose_indented}
  required_values:
  author: fabric
  tags:
  version: 1
config:
  mode: insert
  system: |
    {content_indented}
---

{{{{selection}}}}
"""
src/fabrics_processor/output_files_generator_temp.py ADDED
@@ -0,0 +1,113 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from pathlib import Path
3
+ from qdrant_client import QdrantClient
4
+
5
+ logger = logging.getLogger('fabric_to_espanso')
6
+
7
def generate_yaml(markdown_content, filename, trigger="/:", label=None):
    """
    Generate YAML content from parsed markdown.

    Args:
        markdown_content (str): The content of the markdown file.
        filename (str): The name of the markdown file (without extension).
        trigger (str, optional): The trigger for the YAML entry. Defaults to "/:".
        label (str, optional): The label for the YAML entry. If None, uses the filename.

    Returns:
        str: The cleaned content with an INPUT section appended,
        or None if generation fails.
    """
    try:
        # Clean and format the markdown content
        content = markdown_content.strip()
        # Remove blank lines and normalize per-line spacing.
        content = '\n'.join(line.strip() for line in content.split('\n') if line.strip())
        # Add INPUT section at the end; {clipb} is filled in by espanso.
        content = f"{content}\n\n# INPUT\n{{clipb}}"

        return content
    except Exception as e:
        # Fix: the log message now interpolates the filename (the original
        # f-string contained a broken literal placeholder).
        logger.error(f"Error generating YAML for {filename}: {str(e)}", exc_info=True)
        return None
32
+
33
def generate_markdown(filename: str, content: str, purpose: str) -> str:
    """
    Generate markdown content for a database entry.

    Fix: promptId and name now interpolate *filename* (they previously
    contained a broken literal placeholder).

    Args:
        filename (str): The name of the markdown file (without extension).
        content (str): The system content/instructions.
        purpose (str): The purpose/description of the prompt.

    Returns:
        str: The generated markdown content with TextGenerator front matter.
    """
    return f"""---
PromptInfo:
  promptId: {filename}
  name: {filename}
  description: {purpose}
  required_values:
  author: fabric
  tags:
  version: 1
config:
  mode: insert
  system: {content}
---

{{{{selection}}}}
"""
62
+
63
def generate_markdown_files(client: QdrantClient, collection_name: str, output_folder: str) -> None:
    """Generate markdown files from the Qdrant database.

    Args:
        client: Initialized Qdrant client
        collection_name: Name of the collection to query
        output_folder: Directory where the markdown files will be created

    Raises:
        DatabaseError: If database query fails
        OSError: If file operations fail
        ValueError: If output folder is invalid
    """
    try:
        # Validate output folder
        output_path = Path(output_folder)
        if not output_path.exists():
            output_path.mkdir(parents=True, exist_ok=True)
            logger.info(f"Created markdown output directory: {output_path}")

        # Query all entries from the database
        try:
            results = client.scroll(
                collection_name=collection_name,
                limit=10000  # Adjust based on expected maximum files
            )[0]  # scroll returns a tuple (points, next_page_offset)

            # Generate markdown files for each entry.
            for point in results:
                # Fix: scroll() returns records carrying the stored fields in
                # `payload` (as the main generator module uses), not `metadata`.
                payload = point.payload
                filename = payload['filename']
                content = payload['content']
                purpose = payload['purpose']

                # Generate markdown content
                markdown_content = generate_markdown(filename, content, purpose)

                # Fix: the file name now interpolates the pattern name
                # (was a broken literal placeholder).
                file_path = output_path / f"{filename}.md"
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(markdown_content)

            logger.info(f"Generated {len(results)} markdown files in {output_path}")

        except Exception as e:
            logger.error(f"Error querying database: {e}")
            raise

    except Exception as e:
        logger.error(f"Error generating markdown files: {e}")
        raise
src/search_qdrant/__init__.py ADDED
File without changes
src/search_qdrant/database_query.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from src.fabrics_processor.database import initialize_qdrant_database
3
+ from qdrant_client import QdrantClient
4
+ from qdrant_client.models import QueryResponse
5
+ import argparse
6
+ from src.fabrics_processor.config import config
7
+
8
def query_qdrant_database(
    query: str,
    client: QdrantClient,
    num_results: int = 5,
    collection_name: str = config.embedding.collection_name) -> list[QueryResponse]:
    """Query the Qdrant database for similar documents.

    Args:
        query: The search query text
        client: Initialized QdrantClient instance
        num_results: Maximum number of results to return
        collection_name: Name of the collection to query

    Returns:
        List of QueryResponse objects containing matches

    Raises:
        QdrantException: If there's an error querying the database
    """
    try:
        return client.query(
            collection_name=collection_name,
            query_text=query,
            limit=num_results,
        )
    except Exception as e:
        # Log and propagate so the caller decides how to recover.
        logging.error(f"Error querying Qdrant database: {e}")
        raise
33
+
34
def main():
    """CLI entry point: parse arguments, run the query, print matching filenames."""
    client = initialize_qdrant_database()

    parser = argparse.ArgumentParser(description="Query Qdrant database")
    parser.add_argument("query", type=str, help="The search query text")
    parser.add_argument("--num_results", "-n", type=int, default=5,
                        help="The number of results to return (default: 5)")
    parser.add_argument("--collection_name", "-c", type=str,
                        default=config.embedding.collection_name,
                        help="The name of the collection to query.")
    args = parser.parse_args()

    try:
        matches = query_qdrant_database(
            query=args.query,
            client=client,
            num_results=args.num_results,
            collection_name=args.collection_name,
        )
        print([match.metadata['filename'] for match in matches])
    finally:
        # Always release the client connection, even if the query fails.
        client.close()

if __name__ == "__main__":
    main()
src/search_qdrant/logs/fabric_to_espanso.log.1 ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-01-10 20:20:51,583 - fabric_to_espanso - INFO - Processed: create_ttrc_narrative
2
+ 2025-01-10 20:20:51,597 - fabric_to_espanso - INFO - Processed: prepare_7s_strategy
3
+ 2025-01-10 20:20:51,601 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/convert_to_markdown/system.md
4
+ 2025-01-10 20:20:51,606 - fabric_to_espanso - INFO - Processed: convert_to_markdown
5
+ 2025-01-10 20:20:51,611 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/analyze_incident/system.md
6
+ 2025-01-10 20:20:51,615 - fabric_to_espanso - INFO - Processed: analyze_incident
7
+ 2025-01-10 20:20:51,619 - fabric_to_espanso - INFO - Processed: summarize_meeting
8
+ 2025-01-10 20:20:51,623 - fabric_to_espanso - INFO - Processed: create_formal_email
9
+ 2025-01-10 20:20:51,627 - fabric_to_espanso - INFO - Processed: refine_design_document
10
+ 2025-01-10 20:20:51,631 - fabric_to_espanso - INFO - Processed: improve_prompt
11
+ 2025-01-10 20:20:51,636 - fabric_to_espanso - INFO - Processed: create_logo
12
+ 2025-01-10 20:20:51,640 - fabric_to_espanso - INFO - Processed: create_network_threat_landscape
13
+ 2025-01-10 20:20:51,645 - fabric_to_espanso - INFO - Processed: extract_most_redeeming_thing
14
+ 2025-01-10 20:20:51,649 - fabric_to_espanso - INFO - Processed: create_rpg_summary
15
+ 2025-01-10 20:20:51,653 - fabric_to_espanso - INFO - Processed: analyze_proposition
16
+ 2025-01-10 20:20:51,658 - fabric_to_espanso - INFO - Processed: write_nuclei_template_rule
17
+ 2025-01-10 20:20:51,664 - fabric_to_espanso - INFO - Processed: analyze_email_headers
18
+ 2025-01-10 20:20:51,668 - fabric_to_espanso - INFO - Processed: analyze_presentation
19
+ 2025-01-10 20:20:51,672 - fabric_to_espanso - INFO - Processed: improve_writing
20
+ 2025-01-10 20:20:51,677 - fabric_to_espanso - INFO - Processed: create_user_story
21
+ 2025-01-10 20:20:51,682 - fabric_to_espanso - INFO - Processed: create_stride_threat_model
22
+ 2025-01-10 20:20:51,686 - fabric_to_espanso - INFO - Processed: analyze_debate
23
+ 2025-01-10 20:20:51,691 - fabric_to_espanso - INFO - Processed: analyze_spiritual_text
24
+ 2025-01-10 20:20:51,696 - fabric_to_espanso - INFO - Processed: write_pull-request
25
+ 2025-01-10 20:20:51,701 - fabric_to_espanso - INFO - Processed: extract_insights_dm
26
+ 2025-01-10 20:20:51,705 - fabric_to_espanso - INFO - Processed: analyze_military_strategy
27
+ 2025-01-10 20:20:51,709 - fabric_to_espanso - INFO - Processed: analyze_personality
28
+ 2025-01-10 20:20:51,714 - fabric_to_espanso - INFO - Processed: transcribe_minutes
29
+ 2025-01-10 20:20:51,718 - fabric_to_espanso - INFO - Processed: extract_recipe
30
+ 2025-01-10 20:20:51,722 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/write_python_code_with_explanations/system.md
31
+ 2025-01-10 20:20:51,726 - fabric_to_espanso - INFO - Processed: write_python_code_with_explanations
32
+ 2025-01-10 20:20:51,730 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/summarize_paper/system.md
33
+ 2025-01-10 20:20:51,734 - fabric_to_espanso - INFO - Processed: summarize_paper
34
+ 2025-01-10 20:20:51,738 - fabric_to_espanso - INFO - Processed: check_agreement
35
+ 2025-01-10 20:20:51,744 - fabric_to_espanso - INFO - Processed: find_logical_fallacies
36
+ 2025-01-10 20:20:51,749 - fabric_to_espanso - INFO - Processed: extract_wisdom
37
+ 2025-01-10 20:20:51,753 - fabric_to_espanso - INFO - Processed: extract_wisdom_nometa
38
+ 2025-01-10 20:20:51,757 - fabric_to_espanso - INFO - Processed: create_image_prompt_from_book_extract
39
+ 2025-01-10 20:20:51,762 - fabric_to_espanso - INFO - Processed: identify_dsrp_distinctions
40
+ 2025-01-10 20:20:51,766 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/rewrite_python_code_with_explanations/system.md
41
+ 2025-01-10 20:20:51,770 - fabric_to_espanso - INFO - Processed: rewrite_python_code_with_explanations
42
+ 2025-01-10 20:20:51,774 - fabric_to_espanso - INFO - Processed: extract_controversial_ideas
43
+ 2025-01-10 20:20:51,779 - fabric_to_espanso - INFO - Processed: create_tags
44
+ 2025-01-10 20:20:51,783 - fabric_to_espanso - INFO - Processed: review_design
45
+ 2025-01-10 20:20:51,787 - fabric_to_espanso - INFO - Processed: create_art_prompt
46
+ 2025-01-10 20:20:51,791 - fabric_to_espanso - INFO - Processed: analyze_patent
47
+ 2025-01-10 20:20:51,795 - fabric_to_espanso - INFO - Processed: identify_dsrp_relationships
48
+ 2025-01-10 20:20:51,799 - fabric_to_espanso - INFO - Processed: analyze_cfp_submission
49
+ 2025-01-10 20:20:51,803 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization_for_github
50
+ 2025-01-10 20:20:51,807 - fabric_to_espanso - INFO - Processed: create_graph_from_input
51
+ 2025-01-10 20:20:51,812 - fabric_to_espanso - INFO - Processed: extract_main_idea
52
+ 2025-01-10 20:20:51,816 - fabric_to_espanso - INFO - Processed: extract_latest_video
53
+ 2025-01-10 20:20:51,819 - fabric_to_espanso - INFO - Processed: extract_core_message
54
+ 2025-01-10 20:20:51,823 - fabric_to_espanso - INFO - Processed: extract_jokes
55
+ 2025-01-10 20:20:51,827 - fabric_to_espanso - INFO - Processed: create_academic_paper
56
+ 2025-01-10 20:20:51,831 - fabric_to_espanso - INFO - Processed: create_reading_plan
57
+ 2025-01-10 20:20:51,835 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/added_something_stupid/system.md
58
+ 2025-01-10 20:20:51,839 - fabric_to_espanso - INFO - Processed: added_something_stupid
59
+ 2025-01-10 20:20:51,844 - fabric_to_espanso - INFO - Processed: analyze_risk
60
+ 2025-01-10 20:20:51,848 - fabric_to_espanso - INFO - Processed: improve_report_finding
61
+ 2025-01-10 20:20:51,852 - fabric_to_espanso - INFO - Processed: explain_math
62
+ 2025-01-10 20:20:51,855 - fabric_to_espanso - INFO - Processed: summarize_git_changes
63
+ 2025-01-10 20:20:51,860 - fabric_to_espanso - INFO - Processed: recommend_talkpanel_topics
64
+ 2025-01-10 20:20:51,864 - fabric_to_espanso - INFO - Processed: extract_predictions
65
+ 2025-01-10 20:20:51,868 - fabric_to_espanso - INFO - Processed: extract_primary_solution
66
+ 2025-01-10 20:20:51,872 - fabric_to_espanso - INFO - Processed: extract_videoid
67
+ 2025-01-10 20:20:51,877 - fabric_to_espanso - INFO - Processed: create_show_intro
68
+ 2025-01-10 20:20:51,881 - fabric_to_espanso - INFO - Processed: summarize_git_diff
69
+ 2025-01-10 20:20:51,884 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/website_description/system.md
70
+ 2025-01-10 20:20:51,889 - fabric_to_espanso - INFO - Processed: website_description
71
+ 2025-01-10 20:20:51,892 - fabric_to_espanso - INFO - Processed: create_quiz
72
+ 2025-01-10 20:20:51,897 - fabric_to_espanso - INFO - Processed: write_semgrep_rule
73
+ 2025-01-10 20:20:51,901 - fabric_to_espanso - INFO - Processed: write_hackerone_report
74
+ 2025-01-10 20:20:51,905 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/translate_to_dutch_or_from_dutch_to_english/system.md
75
+ 2025-01-10 20:20:51,909 - fabric_to_espanso - INFO - Processed: translate_to_dutch_or_from_dutch_to_english
76
+ 2025-01-10 20:20:51,913 - fabric_to_espanso - INFO - Processed: summarize_micro
77
+ 2025-01-10 20:20:51,917 - fabric_to_espanso - INFO - Processed: create_ai_jobs_analysis
78
+ 2025-01-10 20:20:51,920 - fabric_to_espanso - INFO - Processed: create_pattern
79
+ 2025-01-10 20:20:51,924 - fabric_to_espanso - INFO - Processed: capture_thinkers_work
80
+ 2025-01-10 20:20:51,929 - fabric_to_espanso - INFO - Processed: analyze_prose_pinker
81
+ 2025-01-10 20:20:51,933 - fabric_to_espanso - INFO - Processed: create_threat_scenarios
82
+ 2025-01-10 20:20:51,938 - fabric_to_espanso - INFO - Processed: extract_ctf_writeup
83
+ 2025-01-10 20:20:51,941 - fabric_to_espanso - INFO - Processed: create_fabric_patterns
84
+ 2025-01-10 20:20:51,946 - fabric_to_espanso - INFO - Processed: ai
85
+ 2025-01-10 20:20:51,950 - fabric_to_espanso - INFO - Processed: rate_ai_response
86
+ 2025-01-10 20:20:51,954 - fabric_to_espanso - INFO - Processed: create_prd
87
+ 2025-01-10 20:20:51,958 - fabric_to_espanso - INFO - Processed: clean_text
88
+ 2025-01-10 20:20:51,962 - fabric_to_espanso - INFO - Processed: create_video_chapters
89
+ 2025-01-10 20:20:51,966 - fabric_to_espanso - INFO - Processed: summarize_lecture
90
+ 2025-01-10 20:20:51,971 - fabric_to_espanso - INFO - Processed: identify_dsrp_perspectives
91
+ 2025-01-10 20:20:51,975 - fabric_to_espanso - INFO - Processed: recommend_artists
92
+ 2025-01-10 20:20:51,979 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed/system.md
93
+ 2025-01-10 20:20:51,983 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed
94
+ 2025-01-10 20:20:51,987 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed_extreme/system.md
95
+ 2025-01-10 20:20:51,992 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed_extreme
96
+ 2025-01-10 20:20:51,996 - fabric_to_espanso - INFO - Processed: extract_ideas
97
+ 2025-01-10 20:20:52,000 - fabric_to_espanso - INFO - Processed: to_flashcards
98
+ 2025-01-10 20:20:52,016 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/extract_instructions/system.md
99
+ 2025-01-10 20:20:52,028 - fabric_to_espanso - INFO - Processed: extract_instructions
100
+ 2025-01-10 20:20:52,032 - fabric_to_espanso - INFO - Processed: write_micro_essay
101
+ 2025-01-10 20:20:52,036 - fabric_to_espanso - INFO - Processed: extract_keywords_and_subjects_from_text
102
+ 2025-01-10 20:20:52,045 - fabric_to_espanso - INFO - Processed: extract_primary_problem
103
+ 2025-01-10 20:20:52,053 - fabric_to_espanso - INFO - Processed: create_hormozi_offer
104
+ 2025-01-10 20:20:52,059 - fabric_to_espanso - INFO - Processed: analyze_prose
105
+ 2025-01-10 20:20:52,064 - fabric_to_espanso - INFO - Processed: analyze_logs
106
+ 2025-01-10 20:20:52,068 - fabric_to_espanso - INFO - Processed: create_recursive_outline
107
+ 2025-01-10 20:20:52,072 - fabric_to_espanso - INFO - Processed: analyze_tech_impact
108
+ 2025-01-10 20:20:52,077 - fabric_to_espanso - INFO - Processed: find_hidden_message
109
+ 2025-01-10 20:20:52,088 - fabric_to_espanso - INFO - Processed: create_npc
110
+ 2025-01-10 20:20:52,104 - fabric_to_espanso - INFO - Processed: provide_guidance
111
+ 2025-01-10 20:20:52,113 - fabric_to_espanso - INFO - Processed: export_data_as_csv
112
+ 2025-01-10 20:20:52,120 - fabric_to_espanso - INFO - Processed: show_fabric_options_markmap
113
+ 2025-01-10 20:20:52,127 - fabric_to_espanso - INFO - Processed: summarize_debate
114
+ 2025-01-10 20:20:52,134 - fabric_to_espanso - INFO - Processed: answer_interview_question
115
+ 2025-01-10 20:20:52,141 - fabric_to_espanso - INFO - Processed: extract_poc
116
+ 2025-01-10 20:20:52,148 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process/system.md
117
+ 2025-01-10 20:20:52,155 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process
118
+ 2025-01-10 20:20:52,163 - fabric_to_espanso - INFO - Processed: rate_content
119
+ 2025-01-10 20:20:52,170 - fabric_to_espanso - INFO - Processed: create_diy
120
+ 2025-01-10 20:20:52,178 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/create_costar_prompt/system.md
121
+ 2025-01-10 20:20:52,186 - fabric_to_espanso - INFO - Processed: create_costar_prompt
122
+ 2025-01-10 20:20:52,193 - fabric_to_espanso - INFO - Processed: create_idea_compass
123
+ 2025-01-10 20:20:52,200 - fabric_to_espanso - INFO - Processed: create_security_update
124
+ 2025-01-10 20:20:52,206 - fabric_to_espanso - INFO - Processed: extract_recommendations
125
+ 2025-01-10 20:20:52,214 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/md_callout/system.md
126
+ 2025-01-10 20:20:52,222 - fabric_to_espanso - INFO - Processed: md_callout
127
+ 2025-01-10 20:20:52,229 - fabric_to_espanso - INFO - Processed: analyze_threat_report
128
+ 2025-01-10 20:20:52,236 - fabric_to_espanso - INFO - Processed: dialog_with_socrates
129
+ 2025-01-10 20:20:52,245 - fabric_to_espanso - INFO - Processed: summarize_newsletter
130
+ 2025-01-10 20:20:52,251 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization
131
+ 2025-01-10 20:20:52,258 - fabric_to_espanso - INFO - Processed: analyze_comments
132
+ 2025-01-10 20:20:52,265 - fabric_to_espanso - INFO - Processed: summarize
133
+ 2025-01-10 20:20:52,271 - fabric_to_espanso - INFO - Processed: compare_and_contrast
134
+ 2025-01-10 20:20:52,278 - fabric_to_espanso - INFO - Successfully processed 199 files in fabric patterns folder
135
+ 2025-01-10 20:20:52,323 - fabric_to_espanso - INFO - Changes detected: 0 new, 0 modified, 0 deleted
src/search_qdrant/logs/fabric_to_espanso.log.2 ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-01-10 20:20:51,570 - fabric_to_espanso - INFO - Processed: extract_wisdom_dm
2
+ 2025-01-10 20:20:51,583 - fabric_to_espanso - INFO - Processed: create_ttrc_narrative
3
+ 2025-01-10 20:20:51,597 - fabric_to_espanso - INFO - Processed: prepare_7s_strategy
4
+ 2025-01-10 20:20:51,601 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/convert_to_markdown/system.md
5
+ 2025-01-10 20:20:51,606 - fabric_to_espanso - INFO - Processed: convert_to_markdown
6
+ 2025-01-10 20:20:51,611 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/analyze_incident/system.md
7
+ 2025-01-10 20:20:51,615 - fabric_to_espanso - INFO - Processed: analyze_incident
8
+ 2025-01-10 20:20:51,619 - fabric_to_espanso - INFO - Processed: summarize_meeting
9
+ 2025-01-10 20:20:51,623 - fabric_to_espanso - INFO - Processed: create_formal_email
10
+ 2025-01-10 20:20:51,627 - fabric_to_espanso - INFO - Processed: refine_design_document
11
+ 2025-01-10 20:20:51,631 - fabric_to_espanso - INFO - Processed: improve_prompt
12
+ 2025-01-10 20:20:51,636 - fabric_to_espanso - INFO - Processed: create_logo
13
+ 2025-01-10 20:20:51,640 - fabric_to_espanso - INFO - Processed: create_network_threat_landscape
14
+ 2025-01-10 20:20:51,645 - fabric_to_espanso - INFO - Processed: extract_most_redeeming_thing
15
+ 2025-01-10 20:20:51,649 - fabric_to_espanso - INFO - Processed: create_rpg_summary
16
+ 2025-01-10 20:20:51,653 - fabric_to_espanso - INFO - Processed: analyze_proposition
17
+ 2025-01-10 20:20:51,658 - fabric_to_espanso - INFO - Processed: write_nuclei_template_rule
18
+ 2025-01-10 20:20:51,664 - fabric_to_espanso - INFO - Processed: analyze_email_headers
19
+ 2025-01-10 20:20:51,668 - fabric_to_espanso - INFO - Processed: analyze_presentation
20
+ 2025-01-10 20:20:51,672 - fabric_to_espanso - INFO - Processed: improve_writing
21
+ 2025-01-10 20:20:51,677 - fabric_to_espanso - INFO - Processed: create_user_story
22
+ 2025-01-10 20:20:51,682 - fabric_to_espanso - INFO - Processed: create_stride_threat_model
23
+ 2025-01-10 20:20:51,686 - fabric_to_espanso - INFO - Processed: analyze_debate
24
+ 2025-01-10 20:20:51,691 - fabric_to_espanso - INFO - Processed: analyze_spiritual_text
25
+ 2025-01-10 20:20:51,696 - fabric_to_espanso - INFO - Processed: write_pull-request
26
+ 2025-01-10 20:20:51,701 - fabric_to_espanso - INFO - Processed: extract_insights_dm
27
+ 2025-01-10 20:20:51,705 - fabric_to_espanso - INFO - Processed: analyze_military_strategy
28
+ 2025-01-10 20:20:51,709 - fabric_to_espanso - INFO - Processed: analyze_personality
29
+ 2025-01-10 20:20:51,714 - fabric_to_espanso - INFO - Processed: transcribe_minutes
30
+ 2025-01-10 20:20:51,718 - fabric_to_espanso - INFO - Processed: extract_recipe
31
+ 2025-01-10 20:20:51,722 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/write_python_code_with_explanations/system.md
32
+ 2025-01-10 20:20:51,726 - fabric_to_espanso - INFO - Processed: write_python_code_with_explanations
33
+ 2025-01-10 20:20:51,730 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/summarize_paper/system.md
34
+ 2025-01-10 20:20:51,734 - fabric_to_espanso - INFO - Processed: summarize_paper
35
+ 2025-01-10 20:20:51,738 - fabric_to_espanso - INFO - Processed: check_agreement
36
+ 2025-01-10 20:20:51,744 - fabric_to_espanso - INFO - Processed: find_logical_fallacies
37
+ 2025-01-10 20:20:51,749 - fabric_to_espanso - INFO - Processed: extract_wisdom
38
+ 2025-01-10 20:20:51,753 - fabric_to_espanso - INFO - Processed: extract_wisdom_nometa
39
+ 2025-01-10 20:20:51,757 - fabric_to_espanso - INFO - Processed: create_image_prompt_from_book_extract
40
+ 2025-01-10 20:20:51,762 - fabric_to_espanso - INFO - Processed: identify_dsrp_distinctions
41
+ 2025-01-10 20:20:51,766 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/rewrite_python_code_with_explanations/system.md
42
+ 2025-01-10 20:20:51,770 - fabric_to_espanso - INFO - Processed: rewrite_python_code_with_explanations
43
+ 2025-01-10 20:20:51,774 - fabric_to_espanso - INFO - Processed: extract_controversial_ideas
44
+ 2025-01-10 20:20:51,779 - fabric_to_espanso - INFO - Processed: create_tags
45
+ 2025-01-10 20:20:51,783 - fabric_to_espanso - INFO - Processed: review_design
46
+ 2025-01-10 20:20:51,787 - fabric_to_espanso - INFO - Processed: create_art_prompt
47
+ 2025-01-10 20:20:51,791 - fabric_to_espanso - INFO - Processed: analyze_patent
48
+ 2025-01-10 20:20:51,795 - fabric_to_espanso - INFO - Processed: identify_dsrp_relationships
49
+ 2025-01-10 20:20:51,799 - fabric_to_espanso - INFO - Processed: analyze_cfp_submission
50
+ 2025-01-10 20:20:51,803 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization_for_github
51
+ 2025-01-10 20:20:51,807 - fabric_to_espanso - INFO - Processed: create_graph_from_input
52
+ 2025-01-10 20:20:51,812 - fabric_to_espanso - INFO - Processed: extract_main_idea
53
+ 2025-01-10 20:20:51,816 - fabric_to_espanso - INFO - Processed: extract_latest_video
54
+ 2025-01-10 20:20:51,819 - fabric_to_espanso - INFO - Processed: extract_core_message
55
+ 2025-01-10 20:20:51,823 - fabric_to_espanso - INFO - Processed: extract_jokes
56
+ 2025-01-10 20:20:51,827 - fabric_to_espanso - INFO - Processed: create_academic_paper
57
+ 2025-01-10 20:20:51,831 - fabric_to_espanso - INFO - Processed: create_reading_plan
58
+ 2025-01-10 20:20:51,835 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/added_something_stupid/system.md
59
+ 2025-01-10 20:20:51,839 - fabric_to_espanso - INFO - Processed: added_something_stupid
60
+ 2025-01-10 20:20:51,844 - fabric_to_espanso - INFO - Processed: analyze_risk
61
+ 2025-01-10 20:20:51,848 - fabric_to_espanso - INFO - Processed: improve_report_finding
62
+ 2025-01-10 20:20:51,852 - fabric_to_espanso - INFO - Processed: explain_math
63
+ 2025-01-10 20:20:51,855 - fabric_to_espanso - INFO - Processed: summarize_git_changes
64
+ 2025-01-10 20:20:51,860 - fabric_to_espanso - INFO - Processed: recommend_talkpanel_topics
65
+ 2025-01-10 20:20:51,864 - fabric_to_espanso - INFO - Processed: extract_predictions
66
+ 2025-01-10 20:20:51,868 - fabric_to_espanso - INFO - Processed: extract_primary_solution
67
+ 2025-01-10 20:20:51,872 - fabric_to_espanso - INFO - Processed: extract_videoid
68
+ 2025-01-10 20:20:51,877 - fabric_to_espanso - INFO - Processed: create_show_intro
69
+ 2025-01-10 20:20:51,881 - fabric_to_espanso - INFO - Processed: summarize_git_diff
70
+ 2025-01-10 20:20:51,884 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/website_description/system.md
71
+ 2025-01-10 20:20:51,889 - fabric_to_espanso - INFO - Processed: website_description
72
+ 2025-01-10 20:20:51,892 - fabric_to_espanso - INFO - Processed: create_quiz
73
+ 2025-01-10 20:20:51,897 - fabric_to_espanso - INFO - Processed: write_semgrep_rule
74
+ 2025-01-10 20:20:51,901 - fabric_to_espanso - INFO - Processed: write_hackerone_report
75
+ 2025-01-10 20:20:51,905 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/translate_to_dutch_or_from_dutch_to_english/system.md
76
+ 2025-01-10 20:20:51,909 - fabric_to_espanso - INFO - Processed: translate_to_dutch_or_from_dutch_to_english
77
+ 2025-01-10 20:20:51,913 - fabric_to_espanso - INFO - Processed: summarize_micro
78
+ 2025-01-10 20:20:51,917 - fabric_to_espanso - INFO - Processed: create_ai_jobs_analysis
79
+ 2025-01-10 20:20:51,920 - fabric_to_espanso - INFO - Processed: create_pattern
80
+ 2025-01-10 20:20:51,924 - fabric_to_espanso - INFO - Processed: capture_thinkers_work
81
+ 2025-01-10 20:20:51,929 - fabric_to_espanso - INFO - Processed: analyze_prose_pinker
82
+ 2025-01-10 20:20:51,933 - fabric_to_espanso - INFO - Processed: create_threat_scenarios
83
+ 2025-01-10 20:20:51,938 - fabric_to_espanso - INFO - Processed: extract_ctf_writeup
84
+ 2025-01-10 20:20:51,941 - fabric_to_espanso - INFO - Processed: create_fabric_patterns
85
+ 2025-01-10 20:20:51,946 - fabric_to_espanso - INFO - Processed: ai
86
+ 2025-01-10 20:20:51,950 - fabric_to_espanso - INFO - Processed: rate_ai_response
87
+ 2025-01-10 20:20:51,954 - fabric_to_espanso - INFO - Processed: create_prd
88
+ 2025-01-10 20:20:51,958 - fabric_to_espanso - INFO - Processed: clean_text
89
+ 2025-01-10 20:20:51,962 - fabric_to_espanso - INFO - Processed: create_video_chapters
90
+ 2025-01-10 20:20:51,966 - fabric_to_espanso - INFO - Processed: summarize_lecture
91
+ 2025-01-10 20:20:51,971 - fabric_to_espanso - INFO - Processed: identify_dsrp_perspectives
92
+ 2025-01-10 20:20:51,975 - fabric_to_espanso - INFO - Processed: recommend_artists
93
+ 2025-01-10 20:20:51,979 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed/system.md
94
+ 2025-01-10 20:20:51,983 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed
95
+ 2025-01-10 20:20:51,987 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed_extreme/system.md
96
+ 2025-01-10 20:20:51,992 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed_extreme
97
+ 2025-01-10 20:20:51,996 - fabric_to_espanso - INFO - Processed: extract_ideas
98
+ 2025-01-10 20:20:52,000 - fabric_to_espanso - INFO - Processed: to_flashcards
99
+ 2025-01-10 20:20:52,016 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/extract_instructions/system.md
100
+ 2025-01-10 20:20:52,028 - fabric_to_espanso - INFO - Processed: extract_instructions
101
+ 2025-01-10 20:20:52,032 - fabric_to_espanso - INFO - Processed: write_micro_essay
102
+ 2025-01-10 20:20:52,036 - fabric_to_espanso - INFO - Processed: extract_keywords_and_subjects_from_text
103
+ 2025-01-10 20:20:52,045 - fabric_to_espanso - INFO - Processed: extract_primary_problem
104
+ 2025-01-10 20:20:52,053 - fabric_to_espanso - INFO - Processed: create_hormozi_offer
105
+ 2025-01-10 20:20:52,059 - fabric_to_espanso - INFO - Processed: analyze_prose
106
+ 2025-01-10 20:20:52,064 - fabric_to_espanso - INFO - Processed: analyze_logs
107
+ 2025-01-10 20:20:52,068 - fabric_to_espanso - INFO - Processed: create_recursive_outline
108
+ 2025-01-10 20:20:52,072 - fabric_to_espanso - INFO - Processed: analyze_tech_impact
109
+ 2025-01-10 20:20:52,077 - fabric_to_espanso - INFO - Processed: find_hidden_message
110
+ 2025-01-10 20:20:52,088 - fabric_to_espanso - INFO - Processed: create_npc
111
+ 2025-01-10 20:20:52,104 - fabric_to_espanso - INFO - Processed: provide_guidance
112
+ 2025-01-10 20:20:52,113 - fabric_to_espanso - INFO - Processed: export_data_as_csv
113
+ 2025-01-10 20:20:52,120 - fabric_to_espanso - INFO - Processed: show_fabric_options_markmap
114
+ 2025-01-10 20:20:52,127 - fabric_to_espanso - INFO - Processed: summarize_debate
115
+ 2025-01-10 20:20:52,134 - fabric_to_espanso - INFO - Processed: answer_interview_question
116
+ 2025-01-10 20:20:52,141 - fabric_to_espanso - INFO - Processed: extract_poc
117
+ 2025-01-10 20:20:52,148 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process/system.md
118
+ 2025-01-10 20:20:52,155 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process
119
+ 2025-01-10 20:20:52,163 - fabric_to_espanso - INFO - Processed: rate_content
120
+ 2025-01-10 20:20:52,170 - fabric_to_espanso - INFO - Processed: create_diy
121
+ 2025-01-10 20:20:52,178 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/create_costar_prompt/system.md
122
+ 2025-01-10 20:20:52,186 - fabric_to_espanso - INFO - Processed: create_costar_prompt
123
+ 2025-01-10 20:20:52,193 - fabric_to_espanso - INFO - Processed: create_idea_compass
124
+ 2025-01-10 20:20:52,200 - fabric_to_espanso - INFO - Processed: create_security_update
125
+ 2025-01-10 20:20:52,206 - fabric_to_espanso - INFO - Processed: extract_recommendations
126
+ 2025-01-10 20:20:52,214 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/md_callout/system.md
127
+ 2025-01-10 20:20:52,222 - fabric_to_espanso - INFO - Processed: md_callout
128
+ 2025-01-10 20:20:52,229 - fabric_to_espanso - INFO - Processed: analyze_threat_report
129
+ 2025-01-10 20:20:52,236 - fabric_to_espanso - INFO - Processed: dialog_with_socrates
130
+ 2025-01-10 20:20:52,245 - fabric_to_espanso - INFO - Processed: summarize_newsletter
131
+ 2025-01-10 20:20:52,251 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization
132
+ 2025-01-10 20:20:52,258 - fabric_to_espanso - INFO - Processed: analyze_comments
133
+ 2025-01-10 20:20:52,265 - fabric_to_espanso - INFO - Processed: summarize
134
+ 2025-01-10 20:20:52,271 - fabric_to_espanso - INFO - Processed: compare_and_contrast
135
+ 2025-01-10 20:20:52,278 - fabric_to_espanso - INFO - Successfully processed 199 files in fabric patterns folder
136
+ 2025-01-10 20:20:52,323 - fabric_to_espanso - INFO - Changes detected: 0 new, 0 modified, 0 deleted
src/search_qdrant/logs/fabric_to_espanso.log.3 ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-01-10 20:20:51,570 - fabric_to_espanso - INFO - Processed: extract_wisdom_dm
2
+ 2025-01-10 20:20:51,583 - fabric_to_espanso - INFO - Processed: create_ttrc_narrative
3
+ 2025-01-10 20:20:51,597 - fabric_to_espanso - INFO - Processed: prepare_7s_strategy
4
+ 2025-01-10 20:20:51,601 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/convert_to_markdown/system.md
5
+ 2025-01-10 20:20:51,606 - fabric_to_espanso - INFO - Processed: convert_to_markdown
6
+ 2025-01-10 20:20:51,611 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/analyze_incident/system.md
7
+ 2025-01-10 20:20:51,615 - fabric_to_espanso - INFO - Processed: analyze_incident
8
+ 2025-01-10 20:20:51,619 - fabric_to_espanso - INFO - Processed: summarize_meeting
9
+ 2025-01-10 20:20:51,623 - fabric_to_espanso - INFO - Processed: create_formal_email
10
+ 2025-01-10 20:20:51,627 - fabric_to_espanso - INFO - Processed: refine_design_document
11
+ 2025-01-10 20:20:51,631 - fabric_to_espanso - INFO - Processed: improve_prompt
12
+ 2025-01-10 20:20:51,636 - fabric_to_espanso - INFO - Processed: create_logo
13
+ 2025-01-10 20:20:51,640 - fabric_to_espanso - INFO - Processed: create_network_threat_landscape
14
+ 2025-01-10 20:20:51,645 - fabric_to_espanso - INFO - Processed: extract_most_redeeming_thing
15
+ 2025-01-10 20:20:51,649 - fabric_to_espanso - INFO - Processed: create_rpg_summary
16
+ 2025-01-10 20:20:51,653 - fabric_to_espanso - INFO - Processed: analyze_proposition
17
+ 2025-01-10 20:20:51,658 - fabric_to_espanso - INFO - Processed: write_nuclei_template_rule
18
+ 2025-01-10 20:20:51,664 - fabric_to_espanso - INFO - Processed: analyze_email_headers
19
+ 2025-01-10 20:20:51,668 - fabric_to_espanso - INFO - Processed: analyze_presentation
20
+ 2025-01-10 20:20:51,672 - fabric_to_espanso - INFO - Processed: improve_writing
21
+ 2025-01-10 20:20:51,677 - fabric_to_espanso - INFO - Processed: create_user_story
22
+ 2025-01-10 20:20:51,682 - fabric_to_espanso - INFO - Processed: create_stride_threat_model
23
+ 2025-01-10 20:20:51,686 - fabric_to_espanso - INFO - Processed: analyze_debate
24
+ 2025-01-10 20:20:51,691 - fabric_to_espanso - INFO - Processed: analyze_spiritual_text
25
+ 2025-01-10 20:20:51,696 - fabric_to_espanso - INFO - Processed: write_pull-request
26
+ 2025-01-10 20:20:51,701 - fabric_to_espanso - INFO - Processed: extract_insights_dm
27
+ 2025-01-10 20:20:51,705 - fabric_to_espanso - INFO - Processed: analyze_military_strategy
28
+ 2025-01-10 20:20:51,709 - fabric_to_espanso - INFO - Processed: analyze_personality
29
+ 2025-01-10 20:20:51,714 - fabric_to_espanso - INFO - Processed: transcribe_minutes
30
+ 2025-01-10 20:20:51,718 - fabric_to_espanso - INFO - Processed: extract_recipe
31
+ 2025-01-10 20:20:51,722 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/write_python_code_with_explanations/system.md
32
+ 2025-01-10 20:20:51,726 - fabric_to_espanso - INFO - Processed: write_python_code_with_explanations
33
+ 2025-01-10 20:20:51,730 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/summarize_paper/system.md
34
+ 2025-01-10 20:20:51,734 - fabric_to_espanso - INFO - Processed: summarize_paper
35
+ 2025-01-10 20:20:51,738 - fabric_to_espanso - INFO - Processed: check_agreement
36
+ 2025-01-10 20:20:51,744 - fabric_to_espanso - INFO - Processed: find_logical_fallacies
37
+ 2025-01-10 20:20:51,749 - fabric_to_espanso - INFO - Processed: extract_wisdom
38
+ 2025-01-10 20:20:51,753 - fabric_to_espanso - INFO - Processed: extract_wisdom_nometa
39
+ 2025-01-10 20:20:51,757 - fabric_to_espanso - INFO - Processed: create_image_prompt_from_book_extract
40
+ 2025-01-10 20:20:51,762 - fabric_to_espanso - INFO - Processed: identify_dsrp_distinctions
41
+ 2025-01-10 20:20:51,766 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/rewrite_python_code_with_explanations/system.md
42
+ 2025-01-10 20:20:51,770 - fabric_to_espanso - INFO - Processed: rewrite_python_code_with_explanations
43
+ 2025-01-10 20:20:51,774 - fabric_to_espanso - INFO - Processed: extract_controversial_ideas
44
+ 2025-01-10 20:20:51,779 - fabric_to_espanso - INFO - Processed: create_tags
45
+ 2025-01-10 20:20:51,783 - fabric_to_espanso - INFO - Processed: review_design
46
+ 2025-01-10 20:20:51,787 - fabric_to_espanso - INFO - Processed: create_art_prompt
47
+ 2025-01-10 20:20:51,791 - fabric_to_espanso - INFO - Processed: analyze_patent
48
+ 2025-01-10 20:20:51,795 - fabric_to_espanso - INFO - Processed: identify_dsrp_relationships
49
+ 2025-01-10 20:20:51,799 - fabric_to_espanso - INFO - Processed: analyze_cfp_submission
50
+ 2025-01-10 20:20:51,803 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization_for_github
51
+ 2025-01-10 20:20:51,807 - fabric_to_espanso - INFO - Processed: create_graph_from_input
52
+ 2025-01-10 20:20:51,812 - fabric_to_espanso - INFO - Processed: extract_main_idea
53
+ 2025-01-10 20:20:51,816 - fabric_to_espanso - INFO - Processed: extract_latest_video
54
+ 2025-01-10 20:20:51,819 - fabric_to_espanso - INFO - Processed: extract_core_message
55
+ 2025-01-10 20:20:51,823 - fabric_to_espanso - INFO - Processed: extract_jokes
56
+ 2025-01-10 20:20:51,827 - fabric_to_espanso - INFO - Processed: create_academic_paper
57
+ 2025-01-10 20:20:51,831 - fabric_to_espanso - INFO - Processed: create_reading_plan
58
+ 2025-01-10 20:20:51,835 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/added_something_stupid/system.md
59
+ 2025-01-10 20:20:51,839 - fabric_to_espanso - INFO - Processed: added_something_stupid
60
+ 2025-01-10 20:20:51,844 - fabric_to_espanso - INFO - Processed: analyze_risk
61
+ 2025-01-10 20:20:51,848 - fabric_to_espanso - INFO - Processed: improve_report_finding
62
+ 2025-01-10 20:20:51,852 - fabric_to_espanso - INFO - Processed: explain_math
63
+ 2025-01-10 20:20:51,855 - fabric_to_espanso - INFO - Processed: summarize_git_changes
64
+ 2025-01-10 20:20:51,860 - fabric_to_espanso - INFO - Processed: recommend_talkpanel_topics
65
+ 2025-01-10 20:20:51,864 - fabric_to_espanso - INFO - Processed: extract_predictions
66
+ 2025-01-10 20:20:51,868 - fabric_to_espanso - INFO - Processed: extract_primary_solution
67
+ 2025-01-10 20:20:51,872 - fabric_to_espanso - INFO - Processed: extract_videoid
68
+ 2025-01-10 20:20:51,877 - fabric_to_espanso - INFO - Processed: create_show_intro
69
+ 2025-01-10 20:20:51,881 - fabric_to_espanso - INFO - Processed: summarize_git_diff
70
+ 2025-01-10 20:20:51,884 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/website_description/system.md
71
+ 2025-01-10 20:20:51,889 - fabric_to_espanso - INFO - Processed: website_description
72
+ 2025-01-10 20:20:51,892 - fabric_to_espanso - INFO - Processed: create_quiz
73
+ 2025-01-10 20:20:51,897 - fabric_to_espanso - INFO - Processed: write_semgrep_rule
74
+ 2025-01-10 20:20:51,901 - fabric_to_espanso - INFO - Processed: write_hackerone_report
75
+ 2025-01-10 20:20:51,905 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/translate_to_dutch_or_from_dutch_to_english/system.md
76
+ 2025-01-10 20:20:51,909 - fabric_to_espanso - INFO - Processed: translate_to_dutch_or_from_dutch_to_english
77
+ 2025-01-10 20:20:51,913 - fabric_to_espanso - INFO - Processed: summarize_micro
78
+ 2025-01-10 20:20:51,917 - fabric_to_espanso - INFO - Processed: create_ai_jobs_analysis
79
+ 2025-01-10 20:20:51,920 - fabric_to_espanso - INFO - Processed: create_pattern
80
+ 2025-01-10 20:20:51,924 - fabric_to_espanso - INFO - Processed: capture_thinkers_work
81
+ 2025-01-10 20:20:51,929 - fabric_to_espanso - INFO - Processed: analyze_prose_pinker
82
+ 2025-01-10 20:20:51,933 - fabric_to_espanso - INFO - Processed: create_threat_scenarios
83
+ 2025-01-10 20:20:51,938 - fabric_to_espanso - INFO - Processed: extract_ctf_writeup
84
+ 2025-01-10 20:20:51,941 - fabric_to_espanso - INFO - Processed: create_fabric_patterns
85
+ 2025-01-10 20:20:51,946 - fabric_to_espanso - INFO - Processed: ai
86
+ 2025-01-10 20:20:51,950 - fabric_to_espanso - INFO - Processed: rate_ai_response
87
+ 2025-01-10 20:20:51,954 - fabric_to_espanso - INFO - Processed: create_prd
88
+ 2025-01-10 20:20:51,958 - fabric_to_espanso - INFO - Processed: clean_text
89
+ 2025-01-10 20:20:51,962 - fabric_to_espanso - INFO - Processed: create_video_chapters
90
+ 2025-01-10 20:20:51,966 - fabric_to_espanso - INFO - Processed: summarize_lecture
91
+ 2025-01-10 20:20:51,971 - fabric_to_espanso - INFO - Processed: identify_dsrp_perspectives
92
+ 2025-01-10 20:20:51,975 - fabric_to_espanso - INFO - Processed: recommend_artists
93
+ 2025-01-10 20:20:51,979 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed/system.md
94
+ 2025-01-10 20:20:51,983 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed
95
+ 2025-01-10 20:20:51,987 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process_detailed_extreme/system.md
96
+ 2025-01-10 20:20:51,992 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process_detailed_extreme
97
+ 2025-01-10 20:20:51,996 - fabric_to_espanso - INFO - Processed: extract_ideas
98
+ 2025-01-10 20:20:52,000 - fabric_to_espanso - INFO - Processed: to_flashcards
99
+ 2025-01-10 20:20:52,016 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/extract_instructions/system.md
100
+ 2025-01-10 20:20:52,028 - fabric_to_espanso - INFO - Processed: extract_instructions
101
+ 2025-01-10 20:20:52,032 - fabric_to_espanso - INFO - Processed: write_micro_essay
102
+ 2025-01-10 20:20:52,036 - fabric_to_espanso - INFO - Processed: extract_keywords_and_subjects_from_text
103
+ 2025-01-10 20:20:52,045 - fabric_to_espanso - INFO - Processed: extract_primary_problem
104
+ 2025-01-10 20:20:52,053 - fabric_to_espanso - INFO - Processed: create_hormozi_offer
105
+ 2025-01-10 20:20:52,059 - fabric_to_espanso - INFO - Processed: analyze_prose
106
+ 2025-01-10 20:20:52,064 - fabric_to_espanso - INFO - Processed: analyze_logs
107
+ 2025-01-10 20:20:52,068 - fabric_to_espanso - INFO - Processed: create_recursive_outline
108
+ 2025-01-10 20:20:52,072 - fabric_to_espanso - INFO - Processed: analyze_tech_impact
109
+ 2025-01-10 20:20:52,077 - fabric_to_espanso - INFO - Processed: find_hidden_message
110
+ 2025-01-10 20:20:52,088 - fabric_to_espanso - INFO - Processed: create_npc
111
+ 2025-01-10 20:20:52,104 - fabric_to_espanso - INFO - Processed: provide_guidance
112
+ 2025-01-10 20:20:52,113 - fabric_to_espanso - INFO - Processed: export_data_as_csv
113
+ 2025-01-10 20:20:52,120 - fabric_to_espanso - INFO - Processed: show_fabric_options_markmap
114
+ 2025-01-10 20:20:52,127 - fabric_to_espanso - INFO - Processed: summarize_debate
115
+ 2025-01-10 20:20:52,134 - fabric_to_espanso - INFO - Processed: answer_interview_question
116
+ 2025-01-10 20:20:52,141 - fabric_to_espanso - INFO - Processed: extract_poc
117
+ 2025-01-10 20:20:52,148 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/solveitwithcode_review_repl_driven_process/system.md
118
+ 2025-01-10 20:20:52,155 - fabric_to_espanso - INFO - Processed: solveitwithcode_review_repl_driven_process
119
+ 2025-01-10 20:20:52,163 - fabric_to_espanso - INFO - Processed: rate_content
120
+ 2025-01-10 20:20:52,170 - fabric_to_espanso - INFO - Processed: create_diy
121
+ 2025-01-10 20:20:52,178 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/create_costar_prompt/system.md
122
+ 2025-01-10 20:20:52,186 - fabric_to_espanso - INFO - Processed: create_costar_prompt
123
+ 2025-01-10 20:20:52,193 - fabric_to_espanso - INFO - Processed: create_idea_compass
124
+ 2025-01-10 20:20:52,200 - fabric_to_espanso - INFO - Processed: create_security_update
125
+ 2025-01-10 20:20:52,206 - fabric_to_espanso - INFO - Processed: extract_recommendations
126
+ 2025-01-10 20:20:52,214 - fabric_to_espanso - WARNING - No sections extracted from /home/jelle/.config/fabric/patterns/md_callout/system.md
127
+ 2025-01-10 20:20:52,222 - fabric_to_espanso - INFO - Processed: md_callout
128
+ 2025-01-10 20:20:52,229 - fabric_to_espanso - INFO - Processed: analyze_threat_report
129
+ 2025-01-10 20:20:52,236 - fabric_to_espanso - INFO - Processed: dialog_with_socrates
130
+ 2025-01-10 20:20:52,245 - fabric_to_espanso - INFO - Processed: summarize_newsletter
131
+ 2025-01-10 20:20:52,251 - fabric_to_espanso - INFO - Processed: create_mermaid_visualization
132
+ 2025-01-10 20:20:52,258 - fabric_to_espanso - INFO - Processed: analyze_comments
133
+ 2025-01-10 20:20:52,265 - fabric_to_espanso - INFO - Processed: summarize
134
+ 2025-01-10 20:20:52,271 - fabric_to_espanso - INFO - Processed: compare_and_contrast
135
+ 2025-01-10 20:20:52,278 - fabric_to_espanso - INFO - Successfully processed 199 files in fabric patterns folder
136
+ 2025-01-10 20:20:52,323 - fabric_to_espanso - INFO - Changes detected: 0 new, 0 modified, 0 deleted
src/search_qdrant/logs/fabric_to_espanso.log.4 ADDED
The diff for this file is too large to render. See raw diff
 
src/search_qdrant/run_query.sh ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
#!/bin/bash
#
# Run the Qdrant query CLI with the project root on PYTHONPATH.

# Make the project's modules importable by prepending the repo root.
PROJECT_ROOT="/home/jelle/Tools/pythagora-core/workspace/fabric-to-espanso"
export PYTHONPATH="${PROJECT_ROOT}:${PYTHONPATH}"

# Forward every CLI argument to the query script unchanged.
python src/search_qdrant/database_query.py "$@"
src/search_qdrant/run_streamlit.bup2 ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#
# Launch the Streamlit app in the background and wait until it reports ready.

# Add the project root to PYTHONPATH
export PYTHONPATH="/home/jelle/Tools/pythagora-core/workspace/fabric-to-espanso:$PYTHONPATH"

# Run the streamlit app in the background, capturing all output in nohup.out
/home/jelle/Tools/pythagora-core/workspace/fabric-to-espanso/.venv/bin/streamlit run ~/Tools/pythagora-core/workspace/fabric-to-espanso/src/search_qdrant/streamlit_app.py > nohup.out 2>&1 &
PID=$!

# Poll the log until Streamlit announces it is serving; bail out if the
# process died in the meantime ("$PID" quoted to be robust).
until grep -q "You can now view your Streamlit app" nohup.out
do
    if ! kill -0 "$PID" 2>/dev/null; then
        echo "Streamlit failed to start"
        exit 1
    fi
    sleep 1
done

# Show the URL lines Streamlit printed.
# (grep reads the file directly; the former "cat | grep" was a useless use of cat.)
grep -A 3 "You can now view your Streamlit app" nohup.out

sleep 3

exit
src/search_qdrant/run_streamlit.sh ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#
# Start the Streamlit app in the background (unless port 8501 is already in
# use) and append its output to a log file.

# Add the project root to PYTHONPATH
export PYTHONPATH="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso:$PYTHONPATH"

# Create a log directory if it doesn't exist
LOG_DIR="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/logs"
mkdir -p "$LOG_DIR"
LOG_FILE="$LOG_DIR/streamlit.log"

# Clean up any existing nohup.out
if [ -f nohup.out ]; then
    cat /dev/null > nohup.out
fi

# Check if streamlit is already running on port 8501
if ss -tuln | grep -q ":8501 "; then
    echo "Port 8501 is already in use. No need to start the app again."
    exit 0
fi

# Run the streamlit app.
# BUG FIX: the redirect previously went to a file literally named "LOG_FILE"
# because the variable was written without '$'; it must be "$LOG_FILE".
echo "Starting Streamlit app..."
nohup /home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/.venv/bin/streamlit run ~/Tools/pythagora-core/workspace/fabric_to_espanso/src/search_qdrant/streamlit_app.py >> "$LOG_FILE" 2>&1 &
PID=$!

echo "Streamlit process started with PID: $PID"

# Wait a moment and check if the process is still running
sleep 2
if ps -p "$PID" > /dev/null; then
    echo "Streamlit successfully started"
else
    echo "Failed to start Streamlit. Check $LOG_FILE for details"
    exit 1
fi
src/search_qdrant/run_streamlit_query_only_terminal_visible.sh ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#
# Run the query-only Streamlit app in the foreground so its output stays
# visible in the terminal.

# Make the project's modules importable.
export PYTHONPATH="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso:$PYTHONPATH"

# Ensure the log directory exists. LOG_FILE mirrors the background launcher
# scripts; the foreground run itself prints straight to the terminal.
LOG_DIR="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/logs"
mkdir -p "$LOG_DIR"
LOG_FILE="$LOG_DIR/streamlit.log"

# Start the app (blocks until the user stops it).
echo "Starting Streamlit app..."
/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/.venv/bin/streamlit run ~/Tools/pythagora-core/workspace/fabric_to_espanso/src/search_qdrant/streamlit_app_query_only.py
src/search_qdrant/run_streamlit_terminal_visible.sh ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#
# Run the full Streamlit app in the foreground so its output stays visible
# in the terminal.

# Make the project's modules importable.
export PYTHONPATH="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso:$PYTHONPATH"

# Ensure the log directory exists. LOG_FILE mirrors the background launcher
# scripts; the foreground run itself prints straight to the terminal.
LOG_DIR="/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/logs"
mkdir -p "$LOG_DIR"
LOG_FILE="$LOG_DIR/streamlit.log"

# Start the app (blocks until the user stops it).
echo "Starting Streamlit app..."
/home/jelle/Tools/pythagora-core/workspace/fabric_to_espanso/.venv/bin/streamlit run ~/Tools/pythagora-core/workspace/fabric_to_espanso/src/search_qdrant/streamlit_app.py
src/search_qdrant/streamlit_app.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import pyperclip
3
+ from pathlib import Path
4
+ from src.fabrics_processor.database import initialize_qdrant_database
5
+ from src.fabrics_processor.database_updater import update_qdrant_database
6
+ from src.fabrics_processor.file_change_detector import detect_file_changes
7
+ from src.search_qdrant.database_query import query_qdrant_database
8
+ from src.fabrics_processor.obsidian2fabric import sync_folders
9
+ from src.fabrics_processor.logger import setup_logger
10
+ import logging
11
+ import atexit
12
+ from src.fabrics_processor.config import config
13
+
14
+ # Configure logging
15
+ logger = setup_logger()
16
+
17
def init_session_state():
    """Initialize the Streamlit session state used across reruns.

    Creates the Qdrant client exactly once (and arranges for it to be
    closed at interpreter exit) and seeds the selection/comparison keys
    with their defaults.
    """
    if 'client' not in st.session_state:
        client = initialize_qdrant_database(api_key=st.secrets["api_key"])
        st.session_state.client = client
        # Close the connection on process exit; the '_transport' check
        # guards against clients that were never fully opened.
        atexit.register(lambda: client.close() if hasattr(client, '_transport') else None)
    # Seed the remaining keys only when absent so reruns keep their values.
    for key, default in (('selected_prompts', []),
                         ('comparing', False),
                         ('comparison_selected', None)):
        if key not in st.session_state:
            st.session_state[key] = default
+ st.session_state.comparison_selected = None
30
+
31
def show_comparison_view(prompts):
    """Show a full-width comparison view of the selected prompts.

    Args:
        prompts: Qdrant query results; each item exposes ``.id`` and a
            ``.metadata`` dict with at least 'filename' and 'content',
            and optionally 'trigger'.

    Side effects: may persist an edited 'trigger' value back to Qdrant,
    copy a prompt's content to the clipboard via pyperclip, and leave
    comparison mode (``st.session_state.comparing = False`` + rerun).
    """
    st.write("## Compare Selected Prompts")

    # Add the back button at the top
    if st.button("Back to search"):
        st.session_state.comparing = False
        st.rerun()

    # Create one column per selected prompt
    cols = st.columns(len(prompts))

    # Track which prompt is selected for copying
    selected_idx = None

    for idx, (col, prompt) in enumerate(zip(cols, prompts)):
        with col:
            st.markdown(f"### {prompt.metadata['filename']}")

            # Create two columns for trigger and button
            trigger_col, button_col = st.columns([0.7, 0.3])

            with trigger_col:
                # Editable espanso trigger for this prompt
                current_trigger = prompt.metadata.get('trigger', '')
                new_trigger = st.text_input("Trigger",
                                            value=current_trigger,
                                            key=f"trigger_{idx}")

                # Update trigger if changed: persist immediately to Qdrant
                if new_trigger != current_trigger:
                    try:
                        st.session_state.client.set_payload(
                            collection_name=config.embedding.collection_name,
                            payload={"trigger": new_trigger},
                            points=[prompt.id]
                        )
                        st.success(f"Updated trigger to: {new_trigger}")
                    except Exception as e:
                        st.error(f"Failed to update trigger: {str(e)}")

            with button_col:
                # Align button with text input using empty space
                st.write("")  # This creates some vertical space
                if st.button(f"Use this prompt", key=f"compare_use_{idx}"):
                    selected_idx = idx

            # Display content as markdown
            st.markdown("### Content")
            st.markdown(prompt.metadata["content"])

    # Handle selection: copy the chosen prompt's content and leave the view
    if selected_idx is not None:
        pyperclip.copy(prompts[selected_idx].metadata['content'])
        st.success(f"Copied {prompts[selected_idx].metadata['filename']} to clipboard!")
        # Clear comparison view
        st.session_state.comparing = False
        st.rerun()
89
+
90
def search_interface():
    """Show the prompt-search interface.

    Renders a free-text query box, runs a semantic search against Qdrant,
    and lets the user select results to copy or compare. When comparison
    mode is active, delegates to show_comparison_view() instead.
    """
    if st.session_state.comparing:
        show_comparison_view(st.session_state.selected_prompts)
        return

    st.subheader("Search for prompts")

    query = st.text_area("What are you trying to accomplish? I will then search for good prompts to give you a good start.")

    if query:
        try:
            results = query_qdrant_database(
                query=query,
                client=st.session_state.client,
                num_results=5,
                collection_name=config.embedding.collection_name
            )

            if results:
                st.write("Which prompts would you like to investigate? Max 3.")

                # Create checkboxes for selection
                selected = []
                for r in results:
                    if st.checkbox(f"{r.metadata['filename']}", key=f"select_{r.id}"):
                        selected.append(r)

                st.session_state.selected_prompts = selected

                if selected:
                    col1, col2 = st.columns(2)
                    with col1:
                        if st.button("Use: copy to clipboard"):
                            if len(selected) == 1:
                                pyperclip.copy(selected[0].metadata['content'])
                                st.success("Copied to clipboard!")
                            else:
                                # BUG FIX: previously the button silently did
                                # nothing when several prompts were ticked.
                                st.warning("Select exactly one prompt to copy, or use Compare.")

                    with col2:
                        if len(selected) > 1 and st.button("Compare"):
                            st.session_state.comparing = True
                            st.rerun()

        except Exception as e:
            logger.error(f"Error in search_interface: {e}", exc_info=True)
            st.error(f"Error searching database: {e}")
136
+
137
def update_database():
    """Update the markdown folder with prompt files from Obsidian.
    Then update the Qdrant database.
    Finally based on the Qdrant database create a new espanso YAML file and
    the Obsidian Textgenerator markdown files.

    NOTE(review): the output-file (YAML/markdown) generation is not invoked
    directly in this function, so it presumably happens inside
    update_qdrant_database -- confirm against
    src/fabrics_processor/database_updater.py.
    """
    try:
        with st.spinner("Processing markdown files..."):
            # First check if there are any changes in the prompt files in Obsidian.
            # If so, add them to the markdown folder before updating the database.
            sync_folders(source_dir=Path(config.obsidian_input_folder), target_dir=Path(config.fabric_patterns_folder))

            # Get current collection info (point count BEFORE the update,
            # used in the summary shown to the user)
            collection_info = st.session_state.client.get_collection(config.embedding.collection_name)
            initial_points = collection_info.points_count

            # Detect file changes (new / modified / deleted pattern files)
            new_files, modified_files, deleted_files = detect_file_changes(
                client=st.session_state.client,
                fabric_patterns_folder=config.fabric_patterns_folder
            )

            # Update the database if changes are detected
            if any([new_files, modified_files, deleted_files]):
                update_qdrant_database(
                    client=st.session_state.client,
                    collection_name=config.embedding.collection_name,
                    new_files=new_files,
                    modified_files=modified_files,
                    deleted_files=deleted_files
                )

            # Get updated collection info (point count AFTER the update)
            collection_info = st.session_state.client.get_collection(config.embedding.collection_name)
            final_points = collection_info.points_count

            # Show summary
            st.success(f"""
            Database update completed successfully!

            Changes detected:
            - {len(new_files)} new files
            - {len(modified_files)} modified files
            - {len(deleted_files)} deleted files

            Database entries:
            - Initial: {initial_points}
            - Final: {final_points}
            """)

    except Exception as e:
        logger.error(f"Error updating database: {e}", exc_info=True)
        st.error(f"Error updating database: {e}")
189
+
190
def display_trigger_table():
    """Display the trigger table in the sidebar.

    Renders a static markdown reference of espanso triggers; the <br>
    padding pushes the table towards the bottom of the sidebar.
    """
    with st.sidebar:
        # Add some space to push the table to the bottom
        st.markdown("<br>" * 10, unsafe_allow_html=True)

        # Create the table (static reference content)
        st.markdown("""
        | trigger | description |
        |---------|-------------|
        | ;;c | code |
        | ;;s | summarize and extract |
        | ;;t | think |
        """)
204
+
205
def main():
    """Entry point for the full prompt-manager Streamlit app.

    Builds the sidebar (logo, page selector, quit button), routes between
    the search and database-update pages, and renders the footer credits.
    """
    st.set_page_config(
        page_title="Fabric to Espanso Prompt Manager",
        layout="wide")
    init_session_state()

    # Sidebar
    with st.sidebar:
        # Add logo to sidebar (path resolved relative to this file:
        # <repo>/data/Fab2Esp_transparent.png)
        image_path = Path(__file__).parent.parent.parent / "data" / "Fab2Esp_transparent.png"
        st.image(str(image_path), width=200, use_container_width=False)

        st.title("Prompt Manager")
        page = st.radio("Select Option:", ["Search for prompts", "Update database and prompt files"])

        # Quit closes the Qdrant connection and halts the script run.
        if st.button("Quit"):
            if hasattr(st.session_state.client, '_transport'):
                st.session_state.client.close()
            st.success("Database connection closed.")
            st.stop()

    # Main content
    if page == "Search for prompts":
        search_interface()
    else:
        st.subheader("Update Database")
        if st.button("Start Update"):
            update_database()

    # Add the trigger table at the end
    display_trigger_table()

    # Add credits at the bottom left (fixed-position HTML footer)
    st.markdown("""
    <style>
    .credits {
        position: fixed;
        left: 1rem;
        bottom: 1rem;
        font-size: 0.8rem;
        color: #666;
        max-width: 600px;
    }
    </style>
    <div class="credits">
    This tool searches the great list of prompts available at <a href="https://github.com/danielmiessler/fabric">https://github.com/danielmiessler/fabric</a>.
    A great commandline utilty build by Daniel Miessler to make the use of LLM more frictionless.<br>
    All credits to him and his fellow fabric builders.
    </div>
    """, unsafe_allow_html=True)
255
+
256
+ if __name__ == "__main__":
257
+ main()
streamlit_app_query_only.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import pyperclip
3
+ from src.fabrics_processor.database import initialize_qdrant_database
4
+ from src.search_qdrant.database_query import query_qdrant_database
5
+ from src.fabrics_processor.logger import setup_logger
6
+ import logging
7
+ import atexit
8
+ from src.fabrics_processor.config import config
9
+ import time
10
+
11
+ # Configure logging
12
+ logger = setup_logger()
13
+
14
def init_session_state():
    """Initialize the Streamlit session state used across reruns.

    Creates the Qdrant client exactly once (and arranges for it to be
    closed at interpreter exit) and seeds the remaining keys with their
    defaults.
    """
    if 'client' not in st.session_state:
        client = initialize_qdrant_database(api_key=st.secrets["api_key"])
        st.session_state.client = client
        # Close the connection on process exit; the '_transport' check
        # guards against clients that were never fully opened.
        atexit.register(lambda: client.close() if hasattr(client, '_transport') else None)
    # Seed the remaining keys only when absent so reruns keep their values.
    for key, default in (('selected_prompts', []),
                         ('comparing', False),
                         ('comparison_selected', None),
                         ('status_key', 0)):
        if key not in st.session_state:
            st.session_state[key] = default
29
+
30
def show_comparison_view(prompts):
    """Show a full-width comparison view of the selected prompts.

    Args:
        prompts: Qdrant query results; each item exposes a ``.metadata``
            dict with at least 'filename' and 'content'.

    Clicking "Use this prompt" leaves comparison mode and reruns the app.
    """
    st.write("## Compare Selected Prompts")

    # Add the back button at the top
    if st.button("Back to search"):
        st.session_state.comparing = False
        st.rerun()

    # Create one column per selected prompt
    cols = st.columns(len(prompts))

    # Track which prompt is selected for copying
    selected_idx = None

    for idx, (col, prompt) in enumerate(zip(cols, prompts)):
        with col:
            st.markdown(f"### {prompt.metadata['filename']}")

            # Display content as a copyable code block
            st.markdown("### Content")
            st.code(prompt.metadata["content"], language="markdown", wrap_lines=True)

            # Add copy button for each prompt.
            # NOTE(review): the st.code() call inside this handler is
            # immediately followed by a rerun below, so the re-rendered
            # block never stays visible -- confirm whether it is needed.
            if st.button(f"Use this prompt", key=f"compare_use_{idx}"):
                st.code(prompt.metadata["content"], language="markdown", wrap_lines=True)
                selected_idx = idx

    # Handle selection: leave comparison mode and return to search
    if selected_idx is not None:
        st.session_state.comparing = False
        st.rerun()
62
+
63
def search_interface():
    """Render the prompt-search UI: query box, result checkboxes, and the
    inline-copy / compare actions.

    When comparison mode is active, the comparison view takes over instead.
    """
    # Comparison mode replaces the whole page.
    if st.session_state.comparing:
        show_comparison_view(st.session_state.selected_prompts)
        return

    query = st.text_area("What are you trying to accomplish? I will then search for good prompts to give you a good start.")

    if not query:
        return

    try:
        hits = query_qdrant_database(
            query=query,
            client=st.session_state.client,
            num_results=5,
            collection_name=config.embedding.collection_name
        )

        if hits:
            st.write("Which prompts would you like to investigate? Max 3.")

            # One checkbox per hit; keep the ticked ones.
            chosen = [hit for hit in hits
                      if st.checkbox(f"{hit.metadata['filename']}", key=f"select_{hit.id}")]

            st.session_state.selected_prompts = chosen

            if chosen:
                left, right = st.columns(2)
                with left:
                    # A single selection is displayed inline, ready to copy.
                    if len(chosen) == 1:
                        st.code(chosen[0].metadata["content"], language="markdown", wrap_lines=True)

                with right:
                    # Two or more selections can be compared side by side.
                    if len(chosen) > 1 and st.button("Compare"):
                        st.session_state.comparing = True
                        st.rerun()
    except Exception as e:
        logger.error(f"Error in search_interface: {e}", exc_info=True)
        st.error(f"Error searching database: {e}")
104
+
105
def main():
    """Main function to run the Streamlit app.

    Query-only variant: configures the page, initializes session state,
    shows the search interface, and renders the footer credits. All
    errors are logged and surfaced to the user.
    """
    st.set_page_config(
        page_title="Find fabric prompts",
        page_icon="🔍",
        layout="wide"
    )

    st.title("Find fabric prompts")

    try:
        init_session_state()
        search_interface()

        # Add credits at the bottom left (fixed-position HTML footer)
        st.markdown("""
        <style>
        .credits {
            position: fixed;
            left: 1rem;
            bottom: 1rem;
            font-size: 0.8rem;
            color: #666;
            max-width: 600px;
        }
        </style>
        <div class="credits">
        This tool searches the great list of prompts available at <a href="https://github.com/danielmiessler/fabric">https://github.com/danielmiessler/fabric</a>.
        A great commandline utilty build by Daniel Miessler to make the use of LLM more frictionless.<br>
        All credits to him and his fellow fabric builders.
        </div>
        """, unsafe_allow_html=True)

    except Exception as e:
        logger.error(f"Error in main: {str(e)}")
        st.error(f"An error occurred: {str(e)}")
141
+
142
+ if __name__ == "__main__":
143
+ main()
tests/__init__.py ADDED
File without changes