eggacheb committed
Commit 21db53c (verified) · Parent: 9e45e7c

Upload 97 files

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full set.
Files changed (50)
  1. .dockerignore +43 -0
  2. .gitattributes +2 -0
  3. .github/ISSUE_TEMPLATE/bug-report---错误反馈.md +28 -0
  4. .github/ISSUE_TEMPLATE/config.yml +5 -0
  5. .github/ISSUE_TEMPLATE/feature-request---功能请求.md +20 -0
  6. .github/dependabot.yml +11 -0
  7. .github/page_build/_config.yml +1 -0
  8. .github/workflows/jekyll-gh-pages.yml +61 -0
  9. .github/workflows/prod.yml +74 -0
  10. .github/workflows/test_lint.yml +50 -0
  11. .gitignore +246 -0
  12. .idea/.gitignore +8 -0
  13. .idea/NekoImageGallery.iml +13 -0
  14. .idea/inspectionProfiles/Project_Default.xml +14 -0
  15. .idea/misc.xml +7 -0
  16. .idea/modules.xml +8 -0
  17. .idea/vcs.xml +6 -0
  18. LICENSE +661 -0
  19. app/Controllers/admin.py +164 -0
  20. app/Controllers/images.py +43 -0
  21. app/Controllers/search.py +214 -0
  22. app/Models/__init__.py +0 -0
  23. app/Models/api_models/__init__.py +0 -0
  24. app/Models/api_models/admin_api_model.py +31 -0
  25. app/Models/api_models/admin_query_params.py +48 -0
  26. app/Models/api_models/search_api_model.py +30 -0
  27. app/Models/api_response/admin_api_response.py +21 -0
  28. app/Models/api_response/base.py +25 -0
  29. app/Models/api_response/images_api_response.py +25 -0
  30. app/Models/api_response/search_api_response.py +8 -0
  31. app/Models/errors.py +10 -0
  32. app/Models/img_data.py +53 -0
  33. app/Models/query_params.py +56 -0
  34. app/Models/search_result.py +7 -0
  35. app/Services/__init__.py +0 -0
  36. app/Services/authentication.py +32 -0
  37. app/Services/index_service.py +60 -0
  38. app/Services/lifespan_service.py +6 -0
  39. app/Services/ocr_services.py +115 -0
  40. app/Services/provider.py +56 -0
  41. app/Services/storage/__init__.py +27 -0
  42. app/Services/storage/base.py +146 -0
  43. app/Services/storage/disabled_storage.py +43 -0
  44. app/Services/storage/exception.py +30 -0
  45. app/Services/storage/local_storage.py +145 -0
  46. app/Services/storage/s3_compatible_storage.py +173 -0
  47. app/Services/transformers_service.py +70 -0
  48. app/Services/upload_service.py +108 -0
  49. app/Services/vector_db_context.py +334 -0
  50. app/__init__.py +6 -0
.dockerignore ADDED
@@ -0,0 +1,43 @@
+ # Include any files or directories that you don't want to be copied to your
+ # container here (e.g., local build artifacts, temporary files, etc.).
+ #
+ # For more help, visit the .dockerignore file reference guide at
+ # https://docs.docker.com/engine/reference/builder/#dockerignore-file
+
+ **/.DS_Store
+ **/__pycache__
+ **/.venv
+ **/.classpath
+ **/.dockerignore
+ **/.env
+ **/.git
+ **/.gitignore
+ **/.project
+ **/.settings
+ **/.toolstarget
+ **/.vs
+ **/.vscode
+ **/*.*proj.user
+ **/*.dbmdl
+ **/*.jfm
+ **/.idea
+ **/bin
+ **/charts
+ **/docker-compose*
+ **/compose*
+ **/*Dockerfile*
+ **/node_modules
+ **/npm-debug.log
+ **/obj
+ **/secrets.dev.yaml
+ **/values.dev.yaml
+ **/venv
+
+ .github/
+ static/
+ web/
+ tests/
+ pylintrc.toml
+ LICENSE
+ readme.md
+ readme_cn.md
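The patterns above decide what is excluded from the Docker build context. As a rough sanity check, the sketch below approximates the matching with Python's stdlib fnmatch; Docker's real matcher (Go's filepath.Match plus ** handling) differs in edge cases, and the sample paths here are made up for illustration.

# Approximate .dockerignore matching with stdlib fnmatch (a sketch, not
# Docker's actual algorithm).
from fnmatch import fnmatch

def ignored(path: str, patterns: list[str]) -> bool:
    for pat in patterns:
        pat = pat.rstrip("/")
        candidates = [pat]
        if pat.startswith("**/"):
            # In .dockerignore, '**/' also matches zero leading directories.
            candidates.append(pat[3:])
        if any(fnmatch(path, c) or fnmatch(path, c + "/*") for c in candidates):
            return True
    return False

patterns = ["**/__pycache__", "**/.venv", "**/docker-compose*", "tests/", "LICENSE"]
for path in ["app/__pycache__/x.pyc", "docker-compose.yml", "tests/test_api.py", "app/main.py"]:
    print(path, "->", "ignored" if ignored(path, patterns) else "kept")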
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ web/screenshots/1.png filter=lfs diff=lfs merge=lfs -text
+ web/screenshots/2.png filter=lfs diff=lfs merge=lfs -text
.github/ISSUE_TEMPLATE/bug-report---错误反馈.md ADDED
@@ -0,0 +1,28 @@
+ ---
+ name: Bug report
+ about: Create a report to help us improve. Report bugs you encounter while using this project.
+ title: ''
+ labels: bug
+ assignees: ''
+
+ ---
+
+ ## Environment
+ NekoImageGallery version: Place the version of NekoImageGallery you're using here.
+
+ Deployment Method: `Local / Docker`
+
+ ## Describe the bug
+ <!-- A clear and concise description of what the bug is. -->
+
+ ## To Reproduce
+ <!-- Steps to reproduce the behavior (including config and environment setup if necessary). -->
+
+ ## Expected behavior
+ <!-- A clear and concise description of what you expected to happen. -->
+
+ ## Screenshots
+ <!-- If applicable, add screenshots to help explain your problem. -->
+
+ ## Additional context
+ <!-- Add any other context about the problem here. -->
.github/ISSUE_TEMPLATE/config.yml ADDED
@@ -0,0 +1,5 @@
+ blank_issues_enabled: true
+ contact_links:
+   - name: Ask a question about the project.
+     url: https://github.com/hv0905/NekoImageGallery/discussions/new?category=q-a
+     about: Ask a question if you encounter a problem when using NekoImageGallery. Please use this option instead of Bug Report unless you are sure your problem is caused by a bug.
.github/ISSUE_TEMPLATE/feature-request---功能请求.md ADDED
@@ -0,0 +1,20 @@
+ ---
+ name: Feature request
+ about: Suggest an idea for this project.
+ title: ''
+ labels: enhancement
+ assignees: ''
+
+ ---
+
+ **Is your feature request related to a problem? Please describe.**
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+ **Describe the solution you'd like**
+ A clear and concise description of what you want to happen.
+
+ **Describe alternatives you've considered**
+ A clear and concise description of any alternative solutions or features you've considered.
+
+ **Additional context**
+ Add any other context or screenshots about the feature request here.
.github/dependabot.yml ADDED
@@ -0,0 +1,11 @@
+ # To get started with Dependabot version updates, you'll need to specify which
+ # package ecosystems to update and where the package manifests are located.
+ # Please see the documentation for all configuration options:
+ # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+ version: 2
+ updates:
+   - package-ecosystem: "pip" # See documentation for possible values
+     directory: "/" # Location of package manifests
+     schedule:
+       interval: "weekly"
.github/page_build/_config.yml ADDED
@@ -0,0 +1 @@
+ theme: jekyll-theme-cayman
.github/workflows/jekyll-gh-pages.yml ADDED
@@ -0,0 +1,61 @@
+ name: Deploy project pages
+
+ on:
+   # Runs on pushes targeting the default branch
+   push:
+     branches: ["master"]
+     paths:
+       - '**/*.md'
+       - '**/*.png' # for screenshots
+       - 'page_build/**'
+       - '.github/workflows/**'
+
+   # Allows you to run this workflow manually from the Actions tab
+   workflow_dispatch:
+
+ # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
+ permissions:
+   contents: read
+   pages: write
+   id-token: write
+
+ # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
+ # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+ concurrency:
+   group: "pages"
+   cancel-in-progress: false
+
+ jobs:
+   # Build job
+   build:
+     runs-on: ubuntu-latest
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+       - name: Setup Pages
+         uses: actions/configure-pages@v4
+       - name: prepare site src
+         run: |
+           mkdir -p .github/page_build
+           cp *.md .github/page_build/
+           cp web/ .github/page_build/ -r
+           ls -lR .github/page_build/
+       - name: Build with Jekyll
+         uses: actions/jekyll-build-pages@v1
+         with:
+           source: ./.github/page_build/
+           destination: ./_site
+       - name: Upload artifact
+         uses: actions/upload-pages-artifact@v3
+
+   # Deployment job
+   deploy:
+     environment:
+       name: github-pages
+       url: ${{ steps.deployment.outputs.page_url }}
+     runs-on: ubuntu-latest
+     needs: build
+     steps:
+       - name: Deploy to GitHub Pages
+         id: deployment
+         uses: actions/deploy-pages@v4
.github/workflows/prod.yml ADDED
@@ -0,0 +1,74 @@
+ name: Check & deploy to DockerHub
+
+ on:
+   push:
+     branches:
+       - 'master'
+     tags:
+       - '*'
+   workflow_dispatch:
+
+ jobs:
+   perform-check:
+     uses: ./.github/workflows/test_lint.yml
+     secrets: inherit
+   docker:
+     runs-on: ubuntu-latest
+     environment: DockerHub
+     needs:
+       - perform-check
+     strategy:
+       matrix:
+         configurations:
+           - dockerfile: "Dockerfile"
+             suffixes: |
+               ""
+               "-cuda"
+               "-cuda12.1"
+             args: |
+               CUDA_VERSION=12.1
+           - dockerfile: "Dockerfile"
+             suffixes: '"-cuda11.8"'
+             args: |
+               CUDA_VERSION=11.8
+           - dockerfile: "cpu-only.Dockerfile"
+             suffixes: '"-cpu"'
+             args: ""
+     steps:
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Login to Docker Hub
+         uses: docker/login-action@v3
+         with:
+           username: ${{ secrets.DOCKERHUB_USERNAME }}
+           password: ${{ secrets.DOCKERHUB_TOKEN }}
+       - name: Docker Meta
+         id: docker-meta
+         uses: docker/metadata-action@v5
+         with:
+           images: edgeneko/neko-image-gallery
+           tags: |
+             type=edge,branch=master
+             type=semver,pattern=v{{version}}
+             type=semver,pattern=v{{major}}.{{minor}}
+       - name: Build combined tags
+         id: combine-tags
+         run: |
+           SUFFIXES=(${{ matrix.configurations.suffixes }})
+           echo 'tags<<EOF' >> $GITHUB_OUTPUT
+           for SUFFIX in "${SUFFIXES[@]}"; do
+             echo '${{ steps.docker-meta.outputs.tags }}' | sed 's/$/'"$SUFFIX"'/' >> $GITHUB_OUTPUT
+           done
+           echo EOF >> $GITHUB_OUTPUT
+
+           printf 'cache_tag=%s' "$(echo '${{ steps.docker-meta.outputs.tags }}' | tail -1 | sed 's/$/'"${SUFFIXES[0]}"'/')" >> $GITHUB_OUTPUT
+       - name: Build and push
+         uses: docker/build-push-action@v5
+         with:
+           file: ${{ matrix.configurations.dockerfile }}
+           push: true
+           tags: ${{ steps.combine-tags.outputs.tags }}
+           build-args: ${{ matrix.configurations.args }}
+           labels: ${{ steps.docker-meta.outputs.labels }}
+           cache-from: type=registry,ref=${{steps.combine-tags.outputs.cache_tag}}
+           cache-to: type=inline
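The "Build combined tags" step above cross-products every tag emitted by docker/metadata-action with the configuration's suffix list, so one matrix entry can publish plain, -cuda, and -cuda12.1 variants of the same image. A minimal Python sketch of that combination logic, using hypothetical tag values rather than output from a real run:

# Mirror the shell loop in "Build combined tags": append each suffix to
# every base tag. The base tags below are illustrative examples only.
base_tags = [
    "edgeneko/neko-image-gallery:edge",
    "edgeneko/neko-image-gallery:v1.2.3",
    "edgeneko/neko-image-gallery:v1.2",
]
suffixes = ["", "-cuda", "-cuda12.1"]  # first matrix configuration

for suffix in suffixes:
    for tag in base_tags:
        print(tag + suffix)

# The cache tag mirrors `tail -1 | sed ...`: the last base tag plus the
# first suffix of this configuration.
print("cache:", base_tags[-1] + suffixes[0])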
.github/workflows/test_lint.yml ADDED
@@ -0,0 +1,50 @@
+ name: Test and Lint Project
+
+ on:
+   workflow_call:
+   push:
+     branches-ignore:
+       - 'master'
+   pull_request:
+
+ jobs:
+   build:
+     runs-on: ubuntu-latest
+     strategy:
+       matrix:
+         python-version: [ "3.10", "3.11" ]
+     steps:
+       - uses: actions/checkout@v4
+       - name: Set up Python ${{ matrix.python-version }}
+         uses: actions/setup-python@v5
+         with:
+           python-version: ${{ matrix.python-version }}
+           cache: 'pip'
+       - name: Cache for models
+         id: cache-models
+         uses: actions/cache@v4
+         with:
+           path: |
+             ~/.cache/huggingface/
+           key: ${{ runner.os }}-models-${{ hashFiles('requirements.txt') }}
+           restore-keys: |
+             ${{ runner.os }}-models-
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu
+           pip install -r requirements.txt
+           pip install -r requirements.dev.txt
+       - name: Test the code with pytest
+         run: |
+           pytest --cov=app .
+       - name: Upload coverage reports to Codecov with GitHub Action
+         uses: codecov/[email protected]
+         if: ${{ matrix.python-version == '3.11' }} # Only upload coverage reports for the latest Python version
+         env:
+           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+       - name: Analysing the code with pylint
+         run: |
+           pylint --rc-file pylintrc.toml -j 0 app scripts tests && lint_result=$? || lint_result=$?
+           exit $(( $lint_result & 35 ))
+
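The closing `exit $(( $lint_result & 35 ))` relies on pylint's bit-flag exit status: bit 1 is set for fatal messages, 2 for errors, 4 for warnings, 8 for refactor messages, 16 for convention messages, and 32 for usage errors. Masking with 35 (1 | 2 | 32) means only fatal messages, errors, and usage errors fail the job. A small Python illustration of the same masking:

# Reproduce `exit $(( lint_result & 35 ))` from the workflow step above.
FAIL_MASK = 0b100011  # 35 == fatal (1) | error (2) | usage error (32)

def ci_exit_code(pylint_status: int) -> int:
    # Keep only the bits that should break the build.
    return pylint_status & FAIL_MASK

assert ci_exit_code(4) == 0    # warnings only: step passes
assert ci_exit_code(24) == 0   # refactor + convention: step passes
assert ci_exit_code(6) == 2    # an error mixed with warnings: step fails
assert ci_exit_code(33) == 33  # fatal + usage error: step fails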
.gitignore ADDED
@@ -0,0 +1,246 @@
+ ### PyCharm template
+ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+ # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+ # User-specific stuff
+ .idea/**/workspace.xml
+ .idea/**/tasks.xml
+ .idea/**/usage.statistics.xml
+ .idea/**/dictionaries
+ .idea/**/shelf
+
+ # AWS User-specific
+ .idea/**/aws.xml
+
+ # Generated files
+ .idea/**/contentModel.xml
+
+ # Sensitive or high-churn files
+ .idea/**/dataSources/
+ .idea/**/dataSources.ids
+ .idea/**/dataSources.local.xml
+ .idea/**/sqlDataSources.xml
+ .idea/**/dynamic.xml
+ .idea/**/uiDesigner.xml
+ .idea/**/dbnavigator.xml
+
+ # Gradle
+ .idea/**/gradle.xml
+ .idea/**/libraries
+
+ # Gradle and Maven with auto-import
+ # When using Gradle or Maven with auto-import, you should exclude module files,
+ # since they will be recreated, and may cause churn. Uncomment if using
+ # auto-import.
+ # .idea/artifacts
+ # .idea/compiler.xml
+ # .idea/jarRepositories.xml
+ # .idea/modules.xml
+ # .idea/*.iml
+ # .idea/modules
+ # *.iml
+ # *.ipr
+
+ # CMake
+ cmake-build-*/
+
+ # Mongo Explorer plugin
+ .idea/**/mongoSettings.xml
+
+ # File-based project format
+ *.iws
+
+ # IntelliJ
+ out/
+
+ # mpeltonen/sbt-idea plugin
+ .idea_modules/
+
+ # JIRA plugin
+ atlassian-ide-plugin.xml
+
+ # Cursive Clojure plugin
+ .idea/replstate.xml
+
+ # SonarLint plugin
+ .idea/sonarlint/
+
+ # Crashlytics plugin (for Android Studio and IntelliJ)
+ com_crashlytics_export_strings.xml
+ crashlytics.properties
+ crashlytics-build.properties
+ fabric.properties
+
+ # Editor-based Rest Client
+ .idea/httpRequests
+
+ # Android studio 3.1+ serialized cache file
+ .idea/caches/build_file_checksums.ser
+
+ ### Python template
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ #pdm.lock
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+ # in version control.
+ # https://pdm.fming.dev/#use-with-ide
+ .pdm.toml
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ # .idea/
+
+ static/
+ qdrant_data/
+ images_metadata/
+ local_*/
+ .idea
.idea/.gitignore ADDED
@@ -0,0 +1,8 @@
+ # Default ignored files
+ /shelf/
+ /workspace.xml
+ # Editor-based HTTP Client requests
+ /httpRequests/
+ # Datasource local storage ignored files
+ /dataSources/
+ /dataSources.local.xml
.idea/NekoImageGallery.iml ADDED
@@ -0,0 +1,13 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <module type="PYTHON_MODULE" version="4">
+   <component name="NewModuleRootManager">
+     <content url="file://$MODULE_DIR$">
+       <excludeFolder url="file://$MODULE_DIR$/.venv" />
+     </content>
+     <orderEntry type="inheritedJdk" />
+     <orderEntry type="sourceFolder" forTests="false" />
+   </component>
+   <component name="PackageRequirementsSettings">
+     <option name="versionSpecifier" value="Don't specify version" />
+   </component>
+ </module>
.idea/inspectionProfiles/Project_Default.xml ADDED
@@ -0,0 +1,14 @@
+ <component name="InspectionProjectProfileManager">
+   <profile version="1.0">
+     <option name="myName" value="Project Default" />
+     <inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
+       <option name="ignoredErrors">
+         <list>
+           <option value="N802" />
+           <option value="N801" />
+           <option value="N806" />
+         </list>
+       </option>
+     </inspection_tool>
+   </profile>
+ </component>
.idea/misc.xml ADDED
@@ -0,0 +1,7 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="Black">
+     <option name="sdkName" value="Python 3.10 (NekoImageGallery)" />
+   </component>
+   <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (NekoImageGallery)" project-jdk-type="Python SDK" />
+ </project>
.idea/modules.xml ADDED
@@ -0,0 +1,8 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="ProjectModuleManager">
+     <modules>
+       <module fileurl="file://$PROJECT_DIR$/.idea/NekoImageGallery.iml" filepath="$PROJECT_DIR$/.idea/NekoImageGallery.iml" />
+     </modules>
+   </component>
+ </project>
.idea/vcs.xml ADDED
@@ -0,0 +1,6 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="VcsDirectoryMappings">
+     <mapping directory="$PROJECT_DIR$" vcs="Git" />
+   </component>
+ </project>
LICENSE ADDED
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+ software and other kinds of works, specifically designed to ensure
+ cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+ to take away your freedom to share and change the works. By contrast,
+ our General Public Licenses are intended to guarantee your freedom to
+ share and change all versions of a program--to make sure it remains free
+ software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+ price. Our General Public Licenses are designed to make sure that you
+ have the freedom to distribute copies of free software (and charge for
+ them if you wish), that you receive source code or can get it if you
+ want it, that you can change the software or use pieces of it in new
+ free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+ with two steps: (1) assert copyright on the software, and (2) offer
+ you this License which gives you legal permission to copy, distribute
+ and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+ improvements made in alternate versions of the program, if they
+ receive widespread use, become available for other developers to
+ incorporate. Many developers of free software are heartened and
+ encouraged by the resulting cooperation. However, in the case of
+ software used on network servers, this result may fail to come about.
+ The GNU General Public License permits making a modified version and
+ letting the public access it on a server without ever releasing its
+ source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ ensure that, in such cases, the modified source code becomes available
+ to the community. It requires the operator of a network server to
+ provide the source code of the modified version running there to the
+ users of that server. Therefore, public use of a modified version, on
+ a publicly accessible server, gives the public access to the source
+ code of the modified version.
+
+ An older license, called the Affero General Public License and
+ published by Affero, was designed to accomplish similar goals. This is
+ a different license, not a version of the Affero GPL, but Affero has
+ released a new version of the Affero GPL which permits relicensing under
+ this license.
+
+ The precise terms and conditions for copying, distribution and
+ modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+ works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+ License. Each licensee is addressed as "you". "Licensees" and
+ "recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+ in a fashion requiring copyright permission, other than the making of an
+ exact copy. The resulting work is called a "modified version" of the
+ earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+ on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+ permission, would make you directly or secondarily liable for
+ infringement under applicable copyright law, except executing it on a
+ computer or modifying a private copy. Propagation includes copying,
+ distribution (with or without modification), making available to the
+ public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+ parties to make or receive copies. Mere interaction with a user through
+ a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+ to the extent that it includes a convenient and prominently visible
+ feature that (1) displays an appropriate copyright notice, and (2)
+ tells the user that there is no warranty for the work (except to the
+ extent that warranties are provided), that licensees may convey the
+ work under this License, and how to view a copy of this License. If
+ the interface presents a list of user commands or options, such as a
+ menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+ for making modifications to it. "Object code" means any non-source
+ form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+ standard defined by a recognized standards body, or, in the case of
+ interfaces specified for a particular programming language, one that
+ is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+ than the work as a whole, that (a) is included in the normal form of
+ packaging a Major Component, but which is not part of that Major
+ Component, and (b) serves only to enable use of the work with that
+ Major Component, or to implement a Standard Interface for which an
+ implementation is available to the public in source code form. A
+ "Major Component", in this context, means a major essential component
+ (kernel, window system, and so on) of the specific operating system
+ (if any) on which the executable work runs, or a compiler used to
+ produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+ the source code needed to generate, install, and (for an executable
+ work) run the object code and to modify the work, including scripts to
+ control those activities. However, it does not include the work's
+ System Libraries, or general-purpose tools or generally available free
+ programs which are used unmodified in performing those activities but
+ which are not part of the work. For example, Corresponding Source
+ includes interface definition files associated with source files for
+ the work, and the source code for shared libraries and dynamically
+ linked subprograms that the work is specifically designed to require,
+ such as by intimate data communication or control flow between those
+ subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+ can regenerate automatically from other parts of the Corresponding
+ Source.
+
+ The Corresponding Source for a work in source code form is that
+ same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+ copyright on the Program, and are irrevocable provided the stated
+ conditions are met. This License explicitly affirms your unlimited
+ permission to run the unmodified Program. The output from running a
+ covered work is covered by this License only if the output, given its
+ content, constitutes a covered work. This License acknowledges your
+ rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+ convey, without conditions so long as your license otherwise remains
+ in force. You may convey covered works to others for the sole purpose
+ of having them make modifications exclusively for you, or provide you
+ with facilities for running those works, provided that you comply with
+ the terms of this License in conveying all material for which you do
+ not control copyright. Those thus making or running the covered works
+ for you must do so exclusively on your behalf, under your direction
+ and control, on terms that prohibit them from making any copies of
+ your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+ the conditions stated below. Sublicensing is not allowed; section 10
+ makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+ measure under any applicable law fulfilling obligations under article
+ 11 of the WIPO copyright treaty adopted on 20 December 1996, or
+ similar laws prohibiting or restricting circumvention of such
+ measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+ circumvention of technological measures to the extent such circumvention
+ is effected by exercising rights under this License with respect to
+ the covered work, and you disclaim any intention to limit operation or
+ modification of the work as a means of enforcing, against the work's
+ users, your or third parties' legal rights to forbid circumvention of
+ technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+ receive it, in any medium, provided that you conspicuously and
+ appropriately publish on each copy an appropriate copyright notice;
+ keep intact all notices stating that this License and any
+ non-permissive terms added in accord with section 7 apply to the code;
+ keep intact all notices of the absence of any warranty; and give all
+ recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+ and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+ produce it from the Program, in the form of source code under the
+ terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+ works, which are not by their nature extensions of the covered work,
+ and which are not combined with it such as to form a larger program,
+ in or on a volume of a storage or distribution medium, is called an
+ "aggregate" if the compilation and its resulting copyright are not
+ used to limit the access or legal rights of the compilation's users
+ beyond what the individual works permit. Inclusion of a covered work
+ in an aggregate does not cause this License to apply to the other
+ parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+ of sections 4 and 5, provided that you also convey the
+ machine-readable Corresponding Source under the terms of this License,
+ in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+ from the Corresponding Source as a System Library, need not be
+ included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+ tangible personal property which is normally used for personal, family,
+ or household purposes, or (2) anything designed or sold for incorporation
+ into a dwelling. In determining whether a product is a consumer product,
+ doubtful cases shall be resolved in favor of coverage. For a particular
+ product received by a particular user, "normally used" refers to a
+ typical or common use of that class of product, regardless of the status
+ of the particular user or of the way in which the particular user
+ actually uses, or expects or is expected to use, the product. A product
+ is a consumer product regardless of whether the product has substantial
+ commercial, industrial or non-consumer uses, unless such uses represent
+ the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+ procedures, authorization keys, or other information required to install
+ and execute modified versions of a covered work in that User Product from
+ a modified version of its Corresponding Source. The information must
+ suffice to ensure that the continued functioning of the modified object
+ code is in no case prevented or interfered with solely because
+ modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+ specifically for use in, a User Product, and the conveying occurs as
+ part of a transaction in which the right of possession and use of the
+ User Product is transferred to the recipient in perpetuity or for a
+ fixed term (regardless of how the transaction is characterized), the
+ Corresponding Source conveyed under this section must be accompanied
+ by the Installation Information. But this requirement does not apply
+ if neither you nor any third party retains the ability to install
+ modified object code on the User Product (for example, the work has
+ been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+ requirement to continue to provide support service, warranty, or updates
+ for a work that has been modified or installed by the recipient, or for
+ the User Product in which it has been modified or installed. Access to a
+ network may be denied when the modification itself materially and
+ adversely affects the operation of the network or violates the rules and
+ protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+ in accord with this section must be in a format that is publicly
+ documented (and with an implementation available to the public in
+ source code form), and must require no special password or key for
+ unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+ License by making exceptions from one or more of its conditions.
+ Additional permissions that are applicable to the entire Program shall
+ be treated as though they were included in this License, to the extent
+ that they are valid under applicable law. If additional permissions
+ apply only to part of the Program, that part may be used separately
+ under those permissions, but the entire Program remains governed by
+ this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+ remove any additional permissions from that copy, or from any part of
+ it. (Additional permissions may be written to require their own
+ removal in certain cases when you modify the work.) You may place
+ additional permissions on material, added by you to a covered work,
+ for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+ add to a covered work, you may (if authorized by the copyright holders of
+ that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+ restrictions" within the meaning of section 10. If the Program as you
+ received it, or any part of it, contains a notice stating that it is
+ governed by this License along with a term that is a further
+ restriction, you may remove that term. If a license document contains
+ a further restriction but permits relicensing or conveying under this
+ License, you may add to a covered work material governed by the terms
+ of that license document, provided that the further restriction does
+ not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+ must place, in the relevant source files, a statement of the
+ additional terms that apply to those files, or a notice indicating
+ where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+ form of a separately written license, or stated as exceptions;
+ the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+ provided under this License. Any attempt otherwise to propagate or
+ modify it is void, and will automatically terminate your rights under
+ this License (including any patent licenses granted under the third
+ paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+ license from a particular copyright holder is reinstated (a)
+ provisionally, unless and until the copyright holder explicitly and
+ finally terminates your license, and (b) permanently, if the copyright
+ holder fails to notify you of the violation by some reasonable means
+ prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+ reinstated permanently if the copyright holder notifies you of the
+ violation by some reasonable means, this is the first time you have
+ received notice of violation of this License (for any work) from that
+ copyright holder, and you cure the violation prior to 30 days after
+ your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+ licenses of parties who have received copies or rights from you under
+ this License. If your rights have been terminated and not permanently
+ reinstated, you do not qualify to receive new licenses for the same
+ material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+ run a copy of the Program. Ancillary propagation of a covered work
+ occurring solely as a consequence of using peer-to-peer transmission
+ to receive a copy likewise does not require acceptance. However,
+ nothing other than this License grants you permission to propagate or
+ modify any covered work. These actions infringe copyright if you do
+ not accept this License. Therefore, by modifying or propagating a
+ covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+ receives a license from the original licensors, to run, modify and
+ propagate that work, subject to this License. You are not responsible
+ for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+ organization, or substantially all assets of one, or subdividing an
+ organization, or merging organizations. If propagation of a covered
+ work results from an entity transaction, each party to that
+ transaction who receives a copy of the work also receives whatever
+ licenses to the work the party's predecessor in interest had or could
+ give under the previous paragraph, plus a right to possession of the
+ Corresponding Source of the work from the predecessor in interest, if
+ the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+ rights granted or affirmed under this License. For example, you may
+ not impose a license fee, royalty, or other charge for exercise of
+ rights granted under this License, and you may not initiate litigation
+ (including a cross-claim or counterclaim in a lawsuit) alleging that
+ any patent claim is infringed by making, using, selling, offering for
+ sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+ License of the Program or a work on which the Program is based. The
+ work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+ owned or controlled by the contributor, whether already acquired or
+ hereafter acquired, that would be infringed by some manner, permitted
+ by this License, of making, using, or selling its contributor version,
+ but do not include claims that would be infringed only as a
+ consequence of further modification of the contributor version. For
+ purposes of this definition, "control" includes the right to grant
+ patent sublicenses in a manner consistent with the requirements of
+ this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+ patent license under the contributor's essential patent claims, to
+ make, use, sell, offer for sale, import and otherwise run, modify and
+ propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+ agreement or commitment, however denominated, not to enforce a patent
+ (such as an express permission to practice a patent or covenant not to
+ sue for patent infringement). To "grant" such a patent license to a
+ party means to make such an agreement or commitment not to enforce a
+ patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+ and the Corresponding Source of the work is not available for anyone
+ to copy, free of charge and under the terms of this License, through a
+ publicly available network server or other readily accessible means,
+ then you must either (1) cause the Corresponding Source to be so
+ available, or (2) arrange to deprive yourself of the benefit of the
+ patent license for this particular work, or (3) arrange, in a manner
+ consistent with the requirements of this License, to extend the patent
+ license to downstream recipients. "Knowingly relying" means you have
+ actual knowledge that, but for the patent license, your conveying the
+ covered work in a country, or your recipient's use of the covered work
+ in a country, would infringe one or more identifiable patents in that
+ country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+ arrangement, you convey, or propagate by procuring conveyance of, a
+ covered work, and grant a patent license to some of the parties
+ receiving the covered work authorizing them to use, propagate, modify
+ or convey a specific copy of the covered work, then the patent license
+ you grant is automatically extended to all recipients of the covered
+ work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+ the scope of its coverage, prohibits the exercise of, or is
+ conditioned on the non-exercise of one or more of the rights that are
+ specifically granted under this License. You may not convey a covered
+ work if you are a party to an arrangement with a third party that is
+ in the business of distributing software, under which you make payment
+ to the third party based on the extent of your activity of conveying
+ the work, and under which the third party grants, to any of the
+ parties who would receive the covered work from you, a discriminatory
+ patent license (a) in connection with copies of the covered work
+ conveyed by you (or copies made from those copies), or (b) primarily
+ for and in connection with specific products or compilations that
+ contain the covered work, unless you entered into that arrangement,
+ or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+ any implied license or other defenses to infringement that may
+ otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+ otherwise) that contradict the conditions of this License, they do not
+ excuse you from the conditions of this License. If you cannot convey a
+ covered work so as to satisfy simultaneously your obligations under this
+ License and any other pertinent obligations, then as a consequence you may
+ not convey it at all. For example, if you agree to terms that obligate you
+ to collect a royalty for further conveying from those to whom you convey
+ the Program, the only way you could satisfy both those terms and this
+ License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+ Program, your modified version must prominently offer all users
+ interacting with it remotely through a computer network (if your version
+ supports such interaction) an opportunity to receive the Corresponding
+ Source of your version by providing access to the Corresponding Source
+ from a network server at no charge, through some standard or customary
+ means of facilitating copying of software. This Corresponding Source
+ shall include the Corresponding Source for any work covered by version 3
+ of the GNU General Public License that is incorporated pursuant to the
+ following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+ permission to link or combine any covered work with a work licensed
+ under version 3 of the GNU General Public License into a single
+ combined work, and to convey the resulting work. The terms of this
+ License will continue to apply to the part which is the covered work,
+ but the work with which it is combined will remain governed by version
+ 3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+ the GNU Affero General Public License from time to time. Such new versions
+ will be similar in spirit to the present version, but may differ in detail to
+ address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+ Program specifies that a certain numbered version of the GNU Affero General
+ Public License "or any later version" applies to it, you have the
+ option of following the terms and conditions either of that numbered
+ version or of any later version published by the Free Software
+ Foundation. If the Program does not specify a version number of the
+ GNU Affero General Public License, you may choose any version ever published
+ by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+ versions of the GNU Affero General Public License can be used, that proxy's
+ public statement of acceptance of a version permanently authorizes you
+ to choose that version for the Program.
+
+ Later license versions may give you additional or different
+ permissions. However, no additional obligations are imposed on any
+ author or copyright holder as a result of your choosing to follow a
+ later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+ APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+ HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+ OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+ THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+ IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+ WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+ THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+ GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+ USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+ DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+ PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+ EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+ above cannot be given local legal effect according to their terms,
+ reviewing courts shall apply local law that most closely approximates
+ an absolute waiver of all civil liability in connection with the
+ Program, unless a warranty or assumption of liability accompanies a
+ copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+ possible use to the public, the best way to achieve this is to make it
+ free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+ to attach them to the start of each source file to most effectively
+ state the exclusion of warranty; and each file should have at least
+ the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+ Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+ network, you should also make sure that it provides a way for users to
+ get its source. For example, if your program is a web application, its
+ interface could display a "Source" link that leads users to an archive
+ of the code. There are many ways you could offer source, and different
+ solutions will be better for different programs; see section 13 for the
+ specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
659
+ if any, to sign a "copyright disclaimer" for the program, if necessary.
660
+ For more information on this, and how to apply and follow the GNU AGPL, see
661
+ <https://www.gnu.org/licenses/>.
app/Controllers/admin.py ADDED
@@ -0,0 +1,164 @@
+ from datetime import datetime
+ from io import BytesIO
+ from pathlib import PurePath
+ from typing import Annotated
+ from uuid import UUID
+
+ from PIL import Image, UnidentifiedImageError
+ from fastapi import APIRouter, Depends, HTTPException, params, UploadFile, File
+ from loguru import logger
+
+ from app.Models.api_models.admin_api_model import ImageOptUpdateModel, DuplicateValidationModel
+ from app.Models.api_models.admin_query_params import UploadImageModel
+ from app.Models.api_response.admin_api_response import ServerInfoResponse, ImageUploadResponse, \
+     DuplicateValidationResponse
+ from app.Models.api_response.base import NekoProtocol
+ from app.Models.errors import PointDuplicateError
+ from app.Models.img_data import ImageData
+ from app.Services.authentication import force_admin_token_verify
+ from app.Services.provider import ServiceProvider
+ from app.Services.vector_db_context import PointNotFoundError
+ from app.config import config
+ from app.util.generate_uuid import generate_uuid_from_sha1
+ from app.util.local_file_utility import VALID_IMAGE_EXTENSIONS
+
+ admin_router = APIRouter(dependencies=[Depends(force_admin_token_verify)], tags=["Admin"])
+
+ services: ServiceProvider | None = None
+
+
+ @admin_router.delete("/delete/{image_id}",
+                      description="Delete the image with the given ID from the database. "
+                                  "If the image is a local image, it will be moved to the `/static/_deleted` folder.")
+ async def delete_image(
+         image_id: Annotated[UUID, params.Path(description="The ID of the image you want to delete.")]) -> NekoProtocol:
+     try:
+         point = await services.db_context.retrieve_by_id(str(image_id))
+     except PointNotFoundError as ex:
+         raise HTTPException(404, "Cannot find the image with the given ID.") from ex
+     await services.db_context.deleteItems([str(point.id)])
+     logger.success("Image {} deleted from database.", point.id)
+
+     if config.storage.method.enabled:  # local image
+         if point.local:
+             image_files = [itm[0] async for itm in
+                            services.storage_service.active_storage.list_files("", f"{point.id}.*")]
+             assert len(image_files) <= 1
+             if not image_files:
+                 logger.warning("Image {} is a local image but not found in static folder.", point.id)
+             else:
+                 await services.storage_service.active_storage.move(image_files[0], f"_deleted/{image_files[0].name}")
+                 logger.success("Image {} removed.", image_files[0].name)
+         if point.thumbnail_url is not None and (point.local or point.local_thumbnail):
+             thumbnail_file = PurePath(f"thumbnails/{point.id}.webp")
+             if await services.storage_service.active_storage.is_exist(thumbnail_file):
+                 await services.storage_service.active_storage.delete(thumbnail_file)
+                 logger.success("Thumbnail {} removed.", thumbnail_file.name)
+             else:
+                 logger.warning("Thumbnail {} not found.", thumbnail_file.name)
+
+     return NekoProtocol(message="Image deleted.")
+
+
+ @admin_router.put("/update_opt/{image_id}", description="Update an image's optional information")
+ async def update_image(image_id: Annotated[UUID, params.Path(description="The ID of the image you want to update.")],
+                        model: ImageOptUpdateModel) -> NekoProtocol:
+     if model.empty():
+         raise HTTPException(422, "Nothing to update.")
+     try:
+         point = await services.db_context.retrieve_by_id(str(image_id))
+     except PointNotFoundError as ex:
+         raise HTTPException(404, "Cannot find the image with the given ID.") from ex
+
+     if model.thumbnail_url is not None:
+         if point.local or point.local_thumbnail:
+             raise HTTPException(422, "Cannot change the thumbnail URL of a local image.")
+         point.thumbnail_url = model.thumbnail_url
+     if model.url is not None:
+         if point.local:
+             raise HTTPException(422, "Cannot change the URL of a local image.")
+         point.url = model.url
+     if model.starred is not None:
+         point.starred = model.starred
+     if model.categories is not None:
+         point.categories = model.categories
+
+     await services.db_context.updatePayload(point)
+     logger.success("Image {} updated.", point.id)
+
+     return NekoProtocol(message="Image updated.")
+
+
+ IMAGE_MIMES = {
+     "image/jpeg": "jpeg",
+     "image/png": "png",
+     "image/webp": "webp",
+     "image/gif": "gif",
+ }
+
+
+ @admin_router.post("/upload",
+                    description="Upload an image to the server. The image will be indexed and stored in the database. "
+                                "If `local` is set to true, the image will also be uploaded to local storage.")
+ async def upload_image(image_file: Annotated[UploadFile, File(description="The image to be uploaded.")],
+                        model: Annotated[UploadImageModel, Depends()]) -> ImageUploadResponse:
+     # infer the image format, then generate an ID for the image
+     img_type = None
+     if image_file.content_type.lower() in IMAGE_MIMES:
+         img_type = IMAGE_MIMES[image_file.content_type.lower()]
+     elif image_file.filename:
+         extension = PurePath(image_file.filename).suffix.lower()
+         if extension in VALID_IMAGE_EXTENSIONS:
+             img_type = extension[1:]
+     if not img_type:
+         logger.warning("Failed to infer image format of the uploaded image. Content Type: {}, Filename: {}",
+                        image_file.content_type, image_file.filename)
+         raise HTTPException(415, "Unsupported image format.")
+     img_bytes = await image_file.read()
+     try:
+         img_id = await services.upload_service.assign_image_id(img_bytes)
+     except PointDuplicateError as ex:
+         raise HTTPException(409,
+                             f"The uploaded point is already contained in the database! entity id: {ex.entity_id}") \
+             from ex
+     try:
+         image = Image.open(BytesIO(img_bytes))
+         image.verify()
+         image.close()
+     except UnidentifiedImageError as ex:
+         logger.warning("Invalid image file from upload request. id: {}", img_id)
+         raise HTTPException(422, "Cannot open the image file.") from ex
+
+     image_data = ImageData(id=img_id,
+                            url=model.url,
+                            thumbnail_url=model.thumbnail_url,
+                            local=model.local,
+                            categories=model.categories,
+                            starred=model.starred,
+                            format=img_type,
+                            index_date=datetime.now())
+
+     await services.upload_service.queue_upload_image(image_data, img_bytes, model.skip_ocr, model.local_thumbnail)
+     return ImageUploadResponse(message="OK. Image added to the upload queue.", image_id=img_id)
+
+
+ @admin_router.get("/server_info", description="Get server information")
+ async def server_info() -> ServerInfoResponse:
+     return ServerInfoResponse(message="Successfully retrieved server information!",
+                               image_count=await services.db_context.get_counts(exact=True),
+                               index_queue_length=services.upload_service.get_queue_size())
+
+
+ @admin_router.post("/duplication_validate",
+                    description="Check if an image exists on the server by its SHA1 hash. If the image exists, "
+                                "its image ID will be returned.\n"
+                                "This is helpful for checking if an image is already on the server without "
+                                "uploading it.")
+ async def duplication_validate(model: DuplicateValidationModel) -> DuplicateValidationResponse:
+     ids = [generate_uuid_from_sha1(t) for t in model.hashes]
+     valid_ids = await services.db_context.validate_ids([str(t) for t in ids])
+     exists_matrix = [str(t) in valid_ids or t in services.upload_service.uploading_ids for t in ids]
+     return DuplicateValidationResponse(
+         exists=exists_matrix,
+         entity_ids=[(str(t) if exists else None) for (t, exists) in zip(ids, exists_matrix)],
+         message="Validation completed.")
app/Controllers/images.py ADDED
@@ -0,0 +1,43 @@
+ from typing import Annotated
+ from uuid import UUID
+
+ from fastapi import APIRouter, Depends, Path, HTTPException, Query
+
+ from app.Models.api_response.images_api_response import QueryByIdApiResponse, ImageStatus, QueryImagesApiResponse
+ from app.Models.query_params import FilterParams
+ from app.Services.authentication import force_access_token_verify
+ from app.Services.provider import ServiceProvider
+ from app.Services.vector_db_context import PointNotFoundError
+ from app.config import config
+
+ images_router = APIRouter(dependencies=([Depends(force_access_token_verify)] if config.access_protected else None),
+                           tags=["Images"])
+
+ services: ServiceProvider | None = None  # The service provider will be injected when the webapp initializes
+
+
+ @images_router.get("/id/{image_id}", description="Query the image info with the given image ID.\n"
+                                                  "This can also be used to check the status "
+                                                  "of an image in the index queue.")
+ async def query_image_by_id(image_id: Annotated[UUID, Path(description="The ID of the image you want to query.")]):
+     try:
+         return QueryByIdApiResponse(img=await services.db_context.retrieve_by_id(str(image_id)),
+                                     img_status=ImageStatus.MAPPED,
+                                     message="Successfully queried the image with the given ID.")
+     except PointNotFoundError as ex:
+         if services.upload_service and image_id in services.upload_service.uploading_ids:
+             return QueryByIdApiResponse(img=None,
+                                         img_status=ImageStatus.IN_QUEUE,
+                                         message="The image is in the indexing queue.")
+         raise HTTPException(404, "Cannot find the image with the given ID.") from ex
+
+
+ @images_router.get("/", description="Query images in order of ID.")
+ async def scroll_images(filter_param: Annotated[FilterParams, Depends()],
+                         prev_offset_id: Annotated[UUID, Query(description="The previous offset image ID.")] = None,
+                         count: Annotated[int, Query(ge=1, le=100, description="The number of images to query.")] = 15):
+     # validate the offset ID if one is given
+     if prev_offset_id is not None and len(await services.db_context.validate_ids([str(prev_offset_id)])) == 0:
+         raise HTTPException(404, "The previous offset ID is invalid.")
+     # pass None (not the string "None") when no offset is given, so the query starts from the first page
+     offset_id = str(prev_offset_id) if prev_offset_id is not None else None
+     images, offset = await services.db_context.scroll_points(offset_id, count, filter_param=filter_param)
+     return QueryImagesApiResponse(images=images, next_page_offset=offset, message="Successfully queried images.")
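
The scroll endpoint above is keyset-paginated: each response carries a next_page_offset that becomes the prev_offset_id of the next request, and a null offset means the listing is exhausted. A hypothetical client loop (base URL and /images mount prefix are assumptions):

    import requests

    offset = None
    while True:
        params = {"count": 100}
        if offset:
            params["prev_offset_id"] = offset
        page = requests.get("http://localhost:8000/images/", params=params).json()
        for img in page["images"]:
            print(img["id"])
        offset = page["next_page_offset"]
        if offset is None:
            break
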
app/Controllers/search.py ADDED
@@ -0,0 +1,214 @@
+ from io import BytesIO
+ from typing import Annotated, List
+ from uuid import uuid4, UUID
+
+ from PIL import Image
+ from fastapi import APIRouter, HTTPException
+ from fastapi.params import File, Query, Path, Depends
+ from loguru import logger
+
+ from app.Models.api_models.search_api_model import AdvancedSearchModel, CombinedSearchModel, SearchBasisEnum
+ from app.Models.api_response.search_api_response import SearchApiResponse
+ from app.Models.query_params import SearchPagingParams, FilterParams
+ from app.Models.search_result import SearchResult
+ from app.Services.authentication import force_access_token_verify
+ from app.Services.provider import ServiceProvider
+ from app.config import config
+ from app.util.calculate_vectors_cosine import calculate_vectors_cosine
+
+ search_router = APIRouter(dependencies=([Depends(force_access_token_verify)] if config.access_protected else None),
+                           tags=["Search"])
+
+ services: ServiceProvider | None = None  # The service provider will be injected when the webapp initializes
+
+
+ class SearchBasisParams:
+     def __init__(self,
+                  basis: Annotated[SearchBasisEnum, Query(
+                      description="The basis used to search the image.")] = SearchBasisEnum.vision):
+         if basis == SearchBasisEnum.ocr and not config.ocr_search.enable:
+             raise HTTPException(400, "OCR search is not enabled.")
+         self.basis = basis
+
+
+ async def result_postprocessing(resp: SearchApiResponse) -> SearchApiResponse:
+     if not config.storage.method.enabled:
+         return resp
+     for item in resp.result:
+         if item.img.local:
+             img_extension = item.img.format or item.img.url.split('.')[-1]
+             img_remote_filename = f"{item.img.id}.{img_extension}"
+             item.img.url = await services.storage_service.active_storage.presign_url(img_remote_filename)
+         if item.img.thumbnail_url is not None and (item.img.local or item.img.local_thumbnail):
+             thumbnail_remote_filename = f"thumbnails/{item.img.id}.webp"
+             item.img.thumbnail_url = await services.storage_service.active_storage.presign_url(
+                 thumbnail_remote_filename)
+     return resp
+
+
+ @search_router.get("/text/{prompt}", description="Search images by text prompt")
+ async def textSearch(
+         prompt: Annotated[
+             str, Path(max_length=100, description="The image prompt text you want to search.")],
+         basis: Annotated[SearchBasisParams, Depends(SearchBasisParams)],
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)],
+         exact: Annotated[bool, Query(
+             description="If using OCR search, this option requires the OCR text to contain **exactly** the "
+                         "criteria you have given. This won't take any effect in vision search.")] = False
+ ) -> SearchApiResponse:
+     logger.info("Text search request received, prompt: {}", prompt)
+     text_vector = services.transformers_service.get_text_vector(prompt) if basis.basis == SearchBasisEnum.vision \
+         else services.transformers_service.get_bert_vector(prompt)
+     if basis.basis == SearchBasisEnum.ocr and exact:
+         filter_param.ocr_text = prompt
+     results = await services.db_context.querySearch(text_vector,
+                                                     query_vector_name=services.db_context.vector_name_for_basis(
+                                                         basis.basis),
+                                                     filter_param=filter_param,
+                                                     top_k=paging.count,
+                                                     skip=paging.skip)
+     return await result_postprocessing(
+         SearchApiResponse(result=results, message=f"Successfully retrieved {len(results)} results.", query_id=uuid4()))
+
+
+ @search_router.post("/image", description="Search images by image")
+ async def imageSearch(
+         image: Annotated[bytes, File(max_length=10 * 1024 * 1024, media_type="image/*",
+                                      description="The image you want to search.")],
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)]
+ ) -> SearchApiResponse:
+     fakefile = BytesIO(image)
+     img = Image.open(fakefile)
+     logger.info("Image search request received")
+     image_vector = services.transformers_service.get_image_vector(img)
+     results = await services.db_context.querySearch(image_vector,
+                                                     top_k=paging.count,
+                                                     skip=paging.skip,
+                                                     filter_param=filter_param)
+     return await result_postprocessing(
+         SearchApiResponse(result=results, message=f"Successfully retrieved {len(results)} results.", query_id=uuid4()))
+
+
+ @search_router.get("/similar/{image_id}",
+                    description="Search images similar to the image with the given ID. "
+                                "The given image itself won't be included in the result.")
+ async def similarWith(
+         image_id: Annotated[UUID, Path(description="The ID of the image you want to search.")],
+         basis: Annotated[SearchBasisParams, Depends(SearchBasisParams)],
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)]
+ ) -> SearchApiResponse:
+     logger.info("Similar search request received, id: {}", image_id)
+     results = await services.db_context.querySimilar(search_id=str(image_id),
+                                                      top_k=paging.count,
+                                                      skip=paging.skip,
+                                                      filter_param=filter_param,
+                                                      query_vector_name=services.db_context.vector_name_for_basis(
+                                                          basis.basis))
+     return await result_postprocessing(
+         SearchApiResponse(result=results, message=f"Successfully retrieved {len(results)} results.", query_id=uuid4()))
+
+
+ @search_router.post("/advanced", description="Search with multiple criteria")
+ async def advancedSearch(
+         model: AdvancedSearchModel,
+         basis: Annotated[SearchBasisParams, Depends(SearchBasisParams)],
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)]) -> SearchApiResponse:
+     logger.info("Advanced search request received: {}", model)
+     result = await process_advanced_and_combined_search_query(model, basis, filter_param, paging)
+     return await result_postprocessing(
+         SearchApiResponse(result=result, message=f"Successfully retrieved {len(result)} results.", query_id=uuid4()))
+
+
+ @search_router.post("/combined", description="Search with combined criteria")
+ async def combinedSearch(
+         model: CombinedSearchModel,
+         basis: Annotated[SearchBasisParams, Depends(SearchBasisParams)],
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)]) -> SearchApiResponse:
+     if not config.ocr_search.enable:
+         raise HTTPException(400, "Combined search requires OCR search, which is not enabled.")
+     logger.info("Combined search request received: {}", model)
+     result = await process_advanced_and_combined_search_query(model, basis, filter_param, paging, True)
+     calculate_and_sort_by_combined_scores(model, basis, result)
+     result = result[:paging.count] if len(result) > paging.count else result
+     return await result_postprocessing(
+         SearchApiResponse(result=result, message=f"Successfully retrieved {len(result)} results.", query_id=uuid4()))
+
+
+ @search_router.get("/random", description="Get random images")
+ async def randomPick(
+         filter_param: Annotated[FilterParams, Depends(FilterParams)],
+         paging: Annotated[SearchPagingParams, Depends(SearchPagingParams)],
+         seed: Annotated[int | None, Query(
+             description="The seed for the random pick. This is helpful for generating a reproducible random pick.")] = None,
+ ) -> SearchApiResponse:
+     logger.info("Random pick request received")
+     random_vector = services.transformers_service.get_random_vector(seed)
+     result = await services.db_context.querySearch(random_vector, top_k=paging.count, skip=paging.skip,
+                                                    filter_param=filter_param)
+     return await result_postprocessing(
+         SearchApiResponse(result=result, message=f"Successfully retrieved {len(result)} results.", query_id=uuid4()))
+
+
+ # @search_router.get("/recall/{query_id}", description="Recall the query with given queryId")
+ # async def recallQuery(query_id: str):
+ #     raise NotImplementedError()
+
+ async def process_advanced_and_combined_search_query(model: AdvancedSearchModel,
+                                                      basis: SearchBasisParams,
+                                                      filter_param: FilterParams,
+                                                      paging: SearchPagingParams,
+                                                      is_combined_search=False) -> List[SearchResult]:
+     match basis.basis:
+         case SearchBasisEnum.ocr:
+             positive_vectors = [services.transformers_service.get_bert_vector(t) for t in model.criteria]
+             negative_vectors = [services.transformers_service.get_bert_vector(t) for t in model.negative_criteria]
+         case SearchBasisEnum.vision:
+             positive_vectors = [services.transformers_service.get_text_vector(t) for t in model.criteria]
+             negative_vectors = [services.transformers_service.get_text_vector(t) for t in model.negative_criteria]
+         case _:  # pragma: no cover
+             raise NotImplementedError()
+     # To preserve ranking quality for combined queries, fetch more candidates than actually requested
+     _query_top_k = min(max(30, paging.count * 3), 100) if is_combined_search else paging.count
+     result = await services.db_context.querySimilar(
+         query_vector_name=services.db_context.vector_name_for_basis(basis.basis),
+         positive_vectors=positive_vectors,
+         negative_vectors=negative_vectors,
+         mode=model.mode,
+         filter_param=filter_param,
+         with_vectors=is_combined_search,
+         top_k=_query_top_k,
+         skip=paging.skip)
+     return result
+
+
+ def calculate_and_sort_by_combined_scores(model: CombinedSearchModel,
+                                           basis: SearchBasisParams,
+                                           result: List[SearchResult]) -> None:
+     # The extra prompt is vectorized with the *other* modality's encoder, so it scores the vector
+     # that the primary search did not use
+     match basis.basis:
+         case SearchBasisEnum.ocr:
+             extra_prompt_vector = services.transformers_service.get_text_vector(model.extra_prompt)
+         case SearchBasisEnum.vision:
+             extra_prompt_vector = services.transformers_service.get_bert_vector(model.extra_prompt)
+         case _:  # pragma: no cover
+             raise NotImplementedError()
+     # Calculate the combined score ((1 + similar_score) * original score) and write it to SearchResult.score
+     for itm in result:
+         match basis.basis:
+             case SearchBasisEnum.ocr:
+                 extra_vector = itm.img.image_vector
+             case SearchBasisEnum.vision:
+                 extra_vector = itm.img.text_contain_vector
+             case _:  # pragma: no cover
+                 raise NotImplementedError()
+         if extra_vector is not None:
+             similar_score = calculate_vectors_cosine(extra_vector, extra_prompt_vector)
+             itm.score = (1 + similar_score) * itm.score
+     # Finally, sort the result by the combined score
+     result.sort(key=lambda i: i.score, reverse=True)
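
The combined scoring above multiplies each vector-search score by (1 + cosine similarity to the extra prompt): a strongly matching extra prompt can at most double an item's score, an orthogonal one leaves it unchanged, and a contradictory one (negative cosine) shrinks it. With made-up numbers:

    base_score = 0.80       # score returned by the primary vector search
    similar_score = 0.45    # cosine(extra_vector, extra_prompt_vector)
    combined = (1 + similar_score) * base_score
    print(round(combined, 2))  # 1.16, boosted above the base score
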
app/Models/__init__.py ADDED
File without changes
app/Models/api_models/__init__.py ADDED
File without changes
app/Models/api_models/admin_api_model.py ADDED
@@ -0,0 +1,31 @@
+ from typing import Optional, Annotated
+
+ from pydantic import BaseModel, Field, StringConstraints
+
+
+ class ImageOptUpdateModel(BaseModel):
+     starred: Optional[bool] = Field(None,
+                                     description="Whether the image is starred or not. Leave empty to keep the value "
+                                                 "unchanged.")
+     categories: Optional[list[str]] = Field(None,
+                                             description="The categories of the image. Leave empty to keep the value "
+                                                         "unchanged.")
+     url: Optional[str] = Field(None,
+                                description="The url of the image. Leave empty to keep the value unchanged. Changing "
+                                            "the url of a local image is not allowed.")
+
+     thumbnail_url: Optional[str] = Field(None,
+                                          description="The url of the thumbnail. Leave empty to keep the value "
+                                                      "unchanged. Changing the thumbnail_url of an image with a local "
+                                                      "thumbnail is not allowed.")
+
+     def empty(self) -> bool:
+         return all(item is None for item in self.model_dump().values())
+
+
+ Sha1HashString = Annotated[
+     str, StringConstraints(min_length=40, max_length=40, pattern=r"[0-9a-f]+", to_lower=True, strip_whitespace=True)]
+
+
+ class DuplicateValidationModel(BaseModel):
+     hashes: list[Sha1HashString] = Field(description="The SHA1 hashes of the images.", min_length=1)
app/Models/api_models/admin_query_params.py ADDED
@@ -0,0 +1,48 @@
+ from enum import Enum
+ from typing import Optional
+
+ from fastapi import Query, HTTPException
+
+
+ class UploadImageThumbnailMode(str, Enum):
+     IF_NECESSARY = "if_necessary"
+     ALWAYS = "always"
+     NEVER = "never"
+
+
+ class UploadImageModel:
+     def __init__(self,
+                  url: Optional[str] = Query(None,
+                                             description="The image's url. If the image is local, this field will be "
+                                                         "ignored. Otherwise it is required."),
+                  thumbnail_url: Optional[str] = Query(None,
+                                                       description="The image's thumbnail url. If the image is local "
+                                                                   "or local_thumbnail's value is always, "
+                                                                   "this field will be ignored. Currently, setting an "
+                                                                   "external thumbnail for a local image is "
+                                                                   "unsupported due to compatibility issues."),
+                  categories: Optional[str] = Query(None,
+                                                    description="The categories of the image. The entries should be "
+                                                                "separated by commas."),
+                  starred: bool = Query(False, description="If the image is starred."),
+                  local: bool = Query(False,
+                                      description="When set to true, the image will be uploaded to local storage. "
+                                                  "Otherwise, it will only be indexed in the database."),
+                  local_thumbnail: Optional[UploadImageThumbnailMode] =
+                  Query(default=None,
+                        description="Whether to generate the thumbnail locally. Possible values:\n"
+                                    "- `if_necessary`: Only generate a thumbnail if the image is larger than 500KB. "
+                                    "This is the default value if `local=True`.\n"
+                                    "- `always`: Always generate a thumbnail.\n"
+                                    "- `never`: Never generate a thumbnail. This is the default value if `local=False`."),
+                  skip_ocr: bool = Query(False, description="Whether to skip the OCR process.")):
+         self.url = url
+         self.thumbnail_url = thumbnail_url
+         self.categories = [t.strip() for t in categories.split(',') if t.strip()] if categories else None
+         self.starred = starred
+         self.local = local
+         self.skip_ocr = skip_ocr
+         self.local_thumbnail = local_thumbnail if (local_thumbnail is not None) else (
+             UploadImageThumbnailMode.IF_NECESSARY if local else UploadImageThumbnailMode.NEVER)
+         if not self.url and not self.local:
+             raise HTTPException(422, "A corresponding url must be provided for a non-local image.")
app/Models/api_models/search_api_model.py ADDED
@@ -0,0 +1,30 @@
+ from enum import Enum
+
+ from pydantic import BaseModel, Field
+
+
+ class SearchBasisEnum(str, Enum):
+     vision = "vision"
+     ocr = "ocr"
+
+
+ class SearchModelEnum(str, Enum):
+     average = "average"
+     best = "best"
+
+
+ class AdvancedSearchModel(BaseModel):
+     criteria: list[str] = Field([],
+                                 description="The positive criteria you want to search with",
+                                 max_length=16,
+                                 min_length=1)
+     negative_criteria: list[str] = Field([],
+                                          description="The negative criteria you want to search with",
+                                          max_length=16)
+     mode: SearchModelEnum = Field(SearchModelEnum.average,
+                                   description="The mode you want to use to combine the criteria.")
+
+
+ class CombinedSearchModel(AdvancedSearchModel):
+     extra_prompt: str = Field(max_length=100,
+                               description="The secondary prompt used for filtering the image.")
app/Models/api_response/admin_api_response.py ADDED
@@ -0,0 +1,21 @@
+ from uuid import UUID
+
+ from pydantic import Field
+
+ from .base import NekoProtocol
+
+
+ class ServerInfoResponse(NekoProtocol):
+     image_count: int
+     index_queue_length: int
+
+
+ class DuplicateValidationResponse(NekoProtocol):
+     entity_ids: list[UUID | None] = Field(
+         description="The image id for each hash. If the image does not exist in the server, the value will be null.")
+     exists: list[bool] = Field(
+         description="Whether the image exists in the server. True if the image exists, False otherwise.")
+
+
+ class ImageUploadResponse(NekoProtocol):
+     image_id: UUID
app/Models/api_response/base.py ADDED
@@ -0,0 +1,25 @@
+ from datetime import datetime
+
+ from pydantic import BaseModel
+
+
+ class NekoProtocol(BaseModel):
+     message: str
+
+
+ class WelcomeApiAuthenticationResponse(BaseModel):
+     required: bool
+     passed: bool
+
+
+ class WelcomeApiAdminPortalAuthenticationResponse(BaseModel):
+     available: bool
+     passed: bool
+
+
+ class WelcomeApiResponse(NekoProtocol):
+     server_time: datetime
+     wiki: dict[str, str]
+     authorization: WelcomeApiAuthenticationResponse
+     admin_api: WelcomeApiAdminPortalAuthenticationResponse
+     available_basis: list[str]
app/Models/api_response/images_api_response.py ADDED
@@ -0,0 +1,25 @@
+ from enum import Enum
+
+ from pydantic import Field
+
+ from app.Models.api_response.base import NekoProtocol
+ from app.Models.img_data import ImageData
+
+
+ class ImageStatus(str, Enum):
+     MAPPED = "mapped"
+     IN_QUEUE = "in_queue"
+
+
+ class QueryByIdApiResponse(NekoProtocol):
+     img_status: ImageStatus = Field(description="The status of the image.\n"
+                                                 "Warning: If NekoImageGallery is deployed in a cluster, "
+                                                 "the `in_queue` status might not be accurate since the index queue "
+                                                 "is independent in each service instance.")
+     img: ImageData | None = Field(description="The mapped image data. Only available when `img_status = mapped`.")
+
+
+ class QueryImagesApiResponse(NekoProtocol):
+     images: list[ImageData] = Field(description="The list of images.")
+     next_page_offset: str | None = Field(description="The offset ID for the next page query. "
+                                                      "If there are no more images, this field will be null.")
app/Models/api_response/search_api_response.py ADDED
@@ -0,0 +1,8 @@
+ from uuid import UUID
+
+ from .base import NekoProtocol
+ from ..search_result import SearchResult
+
+
+ class SearchApiResponse(NekoProtocol):
+     query_id: UUID
+     result: list[SearchResult]
app/Models/errors.py ADDED
@@ -0,0 +1,10 @@
+ from uuid import UUID
+
+
+ class PointDuplicateError(ValueError):
+     def __init__(self, message: str, entity_id: UUID | None = None):
+         self.message = message
+         self.entity_id = entity_id
+         super().__init__(message)
app/Models/img_data.py ADDED
@@ -0,0 +1,53 @@
+ from datetime import datetime
+ from typing import Optional
+ from uuid import UUID
+
+ from numpy import ndarray
+ from pydantic import BaseModel, Field, ConfigDict
+
+
+ class ImageData(BaseModel):
+     model_config = ConfigDict(arbitrary_types_allowed=True, extra='ignore')
+
+     id: UUID
+     url: Optional[str] = None
+     thumbnail_url: Optional[str] = None
+     ocr_text: Optional[str] = None
+     image_vector: Optional[ndarray] = Field(None, exclude=True)
+     text_contain_vector: Optional[ndarray] = Field(None, exclude=True)
+     index_date: datetime
+     width: Optional[int] = None
+     height: Optional[int] = None
+     aspect_ratio: Optional[float] = None
+     starred: Optional[bool] = False
+     categories: Optional[list[str]] = []
+     local: Optional[bool] = False
+     local_thumbnail: Optional[bool] = False
+     format: Optional[str] = None  # required for s3 local storage
+
+     @property
+     def ocr_text_lower(self) -> str | None:
+         if self.ocr_text is None:
+             return None
+         return self.ocr_text.lower()
+
+     @property
+     def payload(self):
+         result = self.model_dump(exclude={'id', 'index_date'})
+         # The Qdrant database cannot accept datetime objects, so we have to convert it to a string
+         result['index_date'] = self.index_date.isoformat()
+         # Qdrant doesn't support case-insensitive search, so we need to store a lowercase version of the text
+         result['ocr_text_lower'] = self.ocr_text_lower
+         return result
+
+     @classmethod
+     def from_payload(cls, img_id: str, payload: dict,
+                      image_vector: Optional[ndarray] = None, text_contain_vector: Optional[ndarray] = None):
+         # Convert the datetime string back to a datetime object
+         index_date = datetime.fromisoformat(payload['index_date'])
+         del payload['index_date']
+         return cls(id=UUID(img_id),
+                    index_date=index_date,
+                    **payload,
+                    image_vector=image_vector,
+                    text_contain_vector=text_contain_vector)
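
Because Qdrant payloads cannot carry datetime objects, ImageData flattens index_date to an ISO string on the way out and from_payload parses it back. A minimal round trip with illustrative values:

    from datetime import datetime
    from uuid import uuid4

    img = ImageData(id=uuid4(), index_date=datetime.now(), ocr_text="Hello")
    payload = img.payload                        # 'index_date' is now an ISO-8601 string
    restored = ImageData.from_payload(str(img.id), payload)
    assert restored.ocr_text_lower == "hello"    # lowercase copy kept for Qdrant text matching
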
app/Models/query_params.py ADDED
@@ -0,0 +1,56 @@
+ from typing import Annotated
+
+ from fastapi.params import Query
+
+
+ class SearchPagingParams:
+     def __init__(
+             self,
+             count: Annotated[int, Query(ge=1, le=100, description="The number of results you want to get.")] = 10,
+             skip: Annotated[int, Query(ge=0, description="The number of results you want to skip.")] = 0
+     ):
+         self.count = count
+         self.skip = skip
+
+
+ class FilterParams:
+     def __init__(
+             self,
+             preferred_ratio: Annotated[
+                 float | None, Query(gt=0, description="The preferred aspect ratio of the image.")] = None,
+             ratio_tolerance: Annotated[
+                 float, Query(gt=0, lt=1, description="The tolerance of the aspect ratio.")] = 0.1,
+             min_width: Annotated[int | None, Query(ge=0, description="The minimum width of the image.")] = None,
+             min_height: Annotated[int | None, Query(ge=0, description="The minimum height of the image.")] = None,
+             starred: Annotated[bool | None, Query(description="Whether the image is starred.")] = None,
+             categories: Annotated[str | None, Query(
+                 description="The categories whitelist of the image. Images with **any of** the given categories will "
+                             "be included. The entries should be separated by commas.",
+                 examples=["stickers, cg"])] = None,
+             categories_negative: Annotated[
+                 str | None, Query(
+                     description="The categories blacklist of the image. Images with **any of** the given categories "
+                                 "will be ignored. The entries should be separated by commas.",
+                     examples=["stickers, cg"])] = None,
+     ):
+         self.preferred_ratio = preferred_ratio
+         self.ratio_tolerance = ratio_tolerance
+         self.min_width = min_width
+         self.min_height = min_height
+         self.starred = starred
+         self.categories = [t.strip() for t in categories.split(',') if t.strip()] if categories else None
+         self.categories_negative = [t.strip() for t in categories_negative.split(',') if
+                                     t.strip()] if categories_negative else None
+         self.ocr_text = None  # For exact search
+
+     @property
+     def min_ratio(self) -> float | None:
+         if self.preferred_ratio is None:
+             return None
+         return self.preferred_ratio * (1 - self.ratio_tolerance)
+
+     @property
+     def max_ratio(self) -> float | None:
+         if self.preferred_ratio is None:
+             return None
+         return self.preferred_ratio * (1 + self.ratio_tolerance)
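
The two properties above turn preferred_ratio and ratio_tolerance into an aspect-ratio band from preferred_ratio * (1 - tolerance) up to preferred_ratio * (1 + tolerance). For example, with the default tolerance of 0.1:

    fp = FilterParams(preferred_ratio=1.5)
    print(round(fp.min_ratio, 2), round(fp.max_ratio, 2))  # 1.35 1.65
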
app/Models/search_result.py ADDED
@@ -0,0 +1,7 @@
+ from pydantic import BaseModel
+
+ from .img_data import ImageData
+
+
+ class SearchResult(BaseModel):
+     img: ImageData
+     score: float
app/Services/__init__.py ADDED
File without changes
app/Services/authentication.py ADDED
@@ -0,0 +1,32 @@
+ from typing import Annotated
+
+ from fastapi import HTTPException
+ from fastapi.params import Header, Depends
+
+ from app.config import config
+
+
+ def verify_access_token(token: str | None) -> bool:
+     return (not config.access_protected) or (token is not None and token == config.access_token)
+
+
+ def permissive_access_token_verify(
+         x_access_token: Annotated[str | None, Header(
+             description="Access token set in configuration (if access_protected is enabled)")] = None) -> bool:
+     return verify_access_token(x_access_token)
+
+
+ def force_access_token_verify(token_passed: Annotated[bool, Depends(permissive_access_token_verify)]):
+     if not token_passed:
+         raise HTTPException(status_code=401, detail="Access token is not present or invalid.")
+
+
+ def permissive_admin_token_verify(
+         x_admin_token: Annotated[str | None, Header(
+             description="Admin token set in configuration (if admin_api_enable is enabled)")] = None) -> bool:
+     return config.admin_api_enable and x_admin_token == config.admin_token
+
+
+ def force_admin_token_verify(token_passed: Annotated[bool, Depends(permissive_admin_token_verify)]):
+     if not token_passed:
+         raise HTTPException(status_code=401, detail="Admin token is not present or invalid.")
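
In practice this means a protected endpoint only needs the matching header; FastAPI maps the x_access_token parameter above to an X-Access-Token HTTP header. A sketch of a request against a deployment with access_protected enabled (the URL, mount prefix, and token are placeholders):

    import requests

    resp = requests.get("http://localhost:8000/search/text/cat",
                        headers={"x-access-token": "<access-token>"})
    print(resp.status_code)  # 401 without a valid token, 200 with one
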
app/Services/index_service.py ADDED
@@ -0,0 +1,60 @@
+ from PIL import Image
+ from fastapi.concurrency import run_in_threadpool
+
+ from app.Models.errors import PointDuplicateError
+ from app.Models.img_data import ImageData
+ from app.Services.lifespan_service import LifespanService
+ from app.Services.ocr_services import OCRService
+ from app.Services.transformers_service import TransformersService
+ from app.Services.vector_db_context import VectorDbContext
+ from app.config import config
+
+
+ class IndexService(LifespanService):
+     def __init__(self, ocr_service: OCRService, transformers_service: TransformersService, db_context: VectorDbContext):
+         self._ocr_service = ocr_service
+         self._transformers_service = transformers_service
+         self._db_context = db_context
+
+     def _prepare_image(self, image: Image.Image, image_data: ImageData, skip_ocr=False):
+         image_data.width = image.width
+         image_data.height = image.height
+         image_data.aspect_ratio = float(image.width) / image.height
+
+         if image.mode != 'RGB':
+             image = image.convert('RGB')  # convert once here to avoid repeated conversions in later steps
+         else:
+             image = image.copy()
+         image_data.image_vector = self._transformers_service.get_image_vector(image)
+         if not skip_ocr and config.ocr_search.enable:
+             image_data.ocr_text = self._ocr_service.ocr_interface(image)
+             if image_data.ocr_text != "":
+                 image_data.text_contain_vector = self._transformers_service.get_bert_vector(image_data.ocr_text)
+             else:
+                 image_data.ocr_text = None
+
+     # currently only a simple existence check is needed here
+     async def _is_point_duplicate(self, image_data: list[ImageData]) -> bool:
+         image_id_list = [str(item.id) for item in image_data]
+         result = await self._db_context.validate_ids(image_id_list)
+         return len(result) != 0
+
+     async def index_image(self, image: Image.Image, image_data: ImageData, skip_ocr=False, skip_duplicate_check=False,
+                           background=False):
+         if not skip_duplicate_check and (await self._is_point_duplicate([image_data])):
+             raise PointDuplicateError("The uploaded points are contained in the database!", image_data.id)
+
+         if background:
+             await run_in_threadpool(self._prepare_image, image, image_data, skip_ocr)
+         else:
+             self._prepare_image(image, image_data, skip_ocr)
+
+         await self._db_context.insertItems([image_data])
+
+     async def index_image_batch(self, image: list[Image.Image], image_data: list[ImageData],
+                                 skip_ocr=False, allow_overwrite=False):
+         if not allow_overwrite and (await self._is_point_duplicate(image_data)):
+             raise PointDuplicateError("The uploaded points are contained in the database!")
+         for img, img_data in zip(image, image_data):
+             self._prepare_image(img, img_data, skip_ocr)
+         await self._db_context.insertItems(image_data)
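
Condensed, a single index call amounts to: duplicate check, vectorization (plus optional OCR), then insertion. A sketch inside an async context, where index_service stands for an IndexService instance (e.g. services.index_service) and the file path is a placeholder:

    from datetime import datetime
    from uuid import uuid4
    from PIL import Image

    img = Image.open("cat.png")
    data = ImageData(id=uuid4(), index_date=datetime.now(), local=True, format="png")
    # background=True pushes the CPU-bound vectorization/OCR into a threadpool
    await index_service.index_image(img, data, background=True)
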
app/Services/lifespan_service.py ADDED
@@ -0,0 +1,6 @@
+ class LifespanService:
+     async def on_load(self):
+         pass
+
+     async def on_exit(self):
+         pass
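
Services opt into startup and shutdown work by overriding these hooks; ServiceProvider (later in this diff) discovers every LifespanService attribute and awaits the hooks concurrently. A hypothetical subclass:

    class WarmUpService(LifespanService):
        async def on_load(self):
            # acquire long-lived resources here (connection pools, model warm-up, ...)
            ...

        async def on_exit(self):
            # release whatever on_load acquired
            ...
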
app/Services/ocr_services.py ADDED
@@ -0,0 +1,115 @@
+ from time import time
+
+ import numpy as np
+ import torch
+ from PIL import Image
+ from loguru import logger
+
+ from app.Services.lifespan_service import LifespanService
+ from app.config import config
+
+
+ class OCRService(LifespanService):
+     def __init__(self):
+         self._device = config.device
+         if self._device == "auto":
+             self._device = "cuda" if torch.cuda.is_available() else "cpu"
+
+     @staticmethod
+     def _image_preprocess(img: Image.Image) -> Image.Image:
+         # letterbox the image onto a black 1024x1024 canvas, downscaling first if either side exceeds 1024
+         if img.mode != 'RGB':
+             img = img.convert('RGB')
+         if img.size[0] > 1024 or img.size[1] > 1024:
+             img.thumbnail((1024, 1024), Image.Resampling.LANCZOS)
+         new_img = Image.new('RGB', (1024, 1024), (0, 0, 0))
+         new_img.paste(img, ((1024 - img.size[0]) // 2, (1024 - img.size[1]) // 2))
+         return new_img
+
+     def ocr_interface(self, img: Image.Image, need_preprocess=True) -> str:
+         raise NotImplementedError  # implemented by the concrete OCR services below
+
+
+ class EasyPaddleOCRService(OCRService):
+     def __init__(self):
+         super().__init__()
+         from easypaddleocr import EasyPaddleOCR
+         self._paddle_ocr_module = EasyPaddleOCR(use_angle_cls=True,
+                                                 needWarmUp=True,
+                                                 devices=self._device,
+                                                 warmup_size=(960, 960),
+                                                 model_local_dir=config.model.easypaddleocr if
+                                                 config.model.easypaddleocr else None)
+         logger.success("EasyPaddleOCR loaded successfully")
+
+     @staticmethod
+     def _image_preprocess(img: Image.Image) -> Image.Image:
+         # The optimized `easypaddleocr` doesn't require the scaling preprocess
+         if img.mode != 'RGB':
+             img = img.convert('RGB')
+         return img
+
+     def _easy_paddleocr_process(self, img: Image.Image) -> str:
+         _, ocr_result, _ = self._paddle_ocr_module.ocr(np.array(img))
+         if ocr_result:
+             return "".join(itm[0] for itm in ocr_result if float(itm[1]) > config.ocr_search.ocr_min_confidence)
+         return ""
+
+     def ocr_interface(self, img: Image.Image, need_preprocess=True) -> str:
+         start_time = time()
+         logger.info("Processing text with EasyPaddleOCR...")
+         res = self._easy_paddleocr_process(self._image_preprocess(img) if need_preprocess else img)
+         logger.success("OCR processing done. Time elapsed: {:.2f}s", time() - start_time)
+         return res
+
+
+ class EasyOCRService(OCRService):
+     def __init__(self):
+         super().__init__()
+         # noinspection PyPackageRequirements
+         import easyocr  # pylint: disable=import-error
+         self._easy_ocr_module = easyocr.Reader(config.ocr_search.ocr_language,
+                                                gpu=self._device == "cuda")
+         logger.success("easyOCR loaded successfully")
+
+     def _easyocr_process(self, img: Image.Image) -> str:
+         ocr_result = self._easy_ocr_module.readtext(np.array(img))
+         return " ".join(itm[1] for itm in ocr_result if itm[2] > config.ocr_search.ocr_min_confidence)
+
+     def ocr_interface(self, img: Image.Image, need_preprocess=True) -> str:
+         start_time = time()
+         logger.info("Processing text with easyOCR...")
+         res = self._easyocr_process(self._image_preprocess(img) if need_preprocess else img)
+         logger.success("OCR processing done. Time elapsed: {:.2f}s", time() - start_time)
+         return res
+
+
+ class PaddleOCRService(OCRService):
+     def __init__(self):
+         super().__init__()
+         # noinspection PyPackageRequirements
+         import paddleocr  # pylint: disable=import-error
+         self._paddle_ocr_module = paddleocr.PaddleOCR(lang="ch", use_angle_cls=True,
+                                                       use_gpu=self._device == "cuda")
+         logger.success("PaddleOCR loaded successfully")
+
+     def _paddleocr_process(self, img: Image.Image) -> str:
+         ocr_result = self._paddle_ocr_module.ocr(np.array(img), cls=True)
+         if ocr_result[0]:
+             return "".join(itm[1][0] for itm in ocr_result[0] if itm[1][1] > config.ocr_search.ocr_min_confidence)
+         return ""
+
+     def ocr_interface(self, img: Image.Image, need_preprocess=True) -> str:
+         start_time = time()
+         logger.info("Processing text with PaddleOCR...")
+         res = self._paddleocr_process(self._image_preprocess(img) if need_preprocess else img)
+         logger.success("OCR processing done. Time elapsed: {:.2f}s", time() - start_time)
+         return res
+
+
+ class DisabledOCRService(OCRService):
+     def __init__(self):
+         super().__init__()
+         logger.warning("OCR search is disabled. Skipping OCR model loading.")
+
+     def ocr_interface(self, img: Image.Image, need_preprocess=True) -> str:
+         raise NotImplementedError("OCR module is disabled. Consider enabling it in the config.")
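
The base preprocessor above letterboxes every input onto a black 1024x1024 canvas, downscaling first when either side exceeds 1024. For instance, a 2048x1024 source is thumbnailed to 1024x512 and pasted centered at (0, 256):

    from PIL import Image

    src = Image.new("RGB", (2048, 1024))
    out = OCRService._image_preprocess(src)
    assert out.size == (1024, 1024)
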
app/Services/provider.py ADDED
@@ -0,0 +1,56 @@
+ import asyncio
+
+ from loguru import logger
+
+ from .index_service import IndexService
+ from .lifespan_service import LifespanService
+ from .storage import StorageService
+ from .transformers_service import TransformersService
+ from .upload_service import UploadService
+ from .vector_db_context import VectorDbContext
+ from ..config import config, environment
+
+
+ class ServiceProvider:
+     def __init__(self):
+         self.transformers_service = TransformersService()
+         self.db_context = VectorDbContext()
+         self.ocr_service = None
+
+         if config.ocr_search.enable and (environment.local_indexing or config.admin_api_enable):
+             match config.ocr_search.ocr_module:
+                 case "easyocr":
+                     from .ocr_services import EasyOCRService
+
+                     self.ocr_service = EasyOCRService()
+                 case "easypaddleocr":
+                     from .ocr_services import EasyPaddleOCRService
+
+                     self.ocr_service = EasyPaddleOCRService()
+                 case "paddleocr":
+                     from .ocr_services import PaddleOCRService
+
+                     self.ocr_service = PaddleOCRService()
+                 case _:
+                     raise NotImplementedError(f"OCR module {config.ocr_search.ocr_module} not implemented.")
+         else:
+             from .ocr_services import DisabledOCRService
+
+             self.ocr_service = DisabledOCRService()
+         logger.info(f"OCR service '{type(self.ocr_service).__name__}' initialized.")
+
+         self.index_service = IndexService(self.ocr_service, self.transformers_service, self.db_context)
+         self.storage_service = StorageService()
+         logger.info(f"Storage service '{type(self.storage_service.active_storage).__name__}' initialized.")
+
+         self.upload_service = UploadService(self.storage_service, self.db_context, self.index_service)
+         logger.info(f"Upload service '{type(self.upload_service).__name__}' initialized.")
+
+     async def onload(self):
+         # discover every attribute that is a LifespanService and run their on_load hooks concurrently
+         tasks = [service.on_load() for service_name in dir(self)
+                  if isinstance((service := getattr(self, service_name)), LifespanService)]
+         await asyncio.gather(*tasks)
+
+     async def onexit(self):
+         tasks = [service.on_exit() for service_name in dir(self)
+                  if isinstance((service := getattr(self, service_name)), LifespanService)]
+         await asyncio.gather(*tasks)
app/Services/storage/__init__.py ADDED
@@ -0,0 +1,27 @@
+ from app.Services.lifespan_service import LifespanService
+ from app.Services.storage.base import BaseStorage
+ from app.Services.storage.disabled_storage import DisabledStorage
+ from app.Services.storage.local_storage import LocalStorage
+ from app.Services.storage.s3_compatible_storage import S3Storage
+ from app.config import config, StorageMode
+
+
+ class StorageService(LifespanService):
+     def __init__(self):
+         self.active_storage = None
+         match config.storage.method:
+             case StorageMode.LOCAL:
+                 self.active_storage = LocalStorage()
+             case StorageMode.S3:
+                 self.active_storage = S3Storage()
+             case StorageMode.DISABLED:
+                 self.active_storage = DisabledStorage()
+             case _:
+                 raise NotImplementedError(f"Storage method {config.storage.method} not implemented. "
+                                           f"Available methods: local, s3")
+
+     async def on_load(self):
+         await self.active_storage.on_load()
+
+     async def on_exit(self):
+         await self.active_storage.on_exit()
app/Services/storage/base.py ADDED
@@ -0,0 +1,146 @@
1
+ import abc
2
+ import os
3
+ from typing import TypeVar, Generic, TypeAlias, Optional, AsyncGenerator
4
+
5
+ from app.Services.lifespan_service import LifespanService
6
+
7
+ FileMetaDataT = TypeVar('FileMetaDataT')
8
+
9
+ PathLikeType: TypeAlias = str | os.PathLike
10
+ LocalFilePathType: TypeAlias = PathLikeType | bytes
11
+ RemoteFilePathType: TypeAlias = PathLikeType
12
+ LocalFileMetaDataType: TypeAlias = FileMetaDataT
13
+ RemoteFileMetaDataType: TypeAlias = FileMetaDataT
14
+
15
+
16
+ class BaseStorage(LifespanService, abc.ABC, Generic[FileMetaDataT]):
17
+ def __init__(self):
18
+ self.static_dir: os.PathLike
19
+ self.thumbnails_dir: os.PathLike
20
+ self.deleted_dir: os.PathLike
21
+ self.file_metadata: FileMetaDataT
22
+
23
+ @abc.abstractmethod
24
+ async def is_exist(self,
25
+ remote_file: RemoteFilePathType) -> bool:
26
+ """
27
+ Check if a remote_file exists.
28
+ :param remote_file: The file path relative to static_dir
29
+ :return: True if the file exists, False otherwise
30
+ """
31
+ raise NotImplementedError
32
+
33
+ @abc.abstractmethod
34
+ async def size(self,
35
+ remote_file: RemoteFilePathType) -> int:
36
+ """
37
+ Get the size of a file in static_dir
38
+ :param remote_file: The file path relative to static_dir
39
+ :return: file's size
40
+ """
41
+ raise NotImplementedError
42
+
43
+ @abc.abstractmethod
44
+ async def url(self,
45
+ remote_file: RemoteFilePathType) -> str:
46
+ """
47
+ Get the original URL of a file in static_dir.
48
+ This url will be placed in the payload field of the qdrant.
49
+ :param remote_file: The file path relative to static_dir
50
+ :return: file's "original URL"
51
+ """
52
+ raise NotImplementedError
53
+
54
+ @abc.abstractmethod
55
+ async def presign_url(self,
56
+ remote_file: RemoteFilePathType,
57
+ expire_second: int = 3600) -> str:
58
+ """
59
+ Get the presign URL of a file in static_dir.
60
+ :param remote_file: The file path relative to static_dir
61
+ :param expire_second: Valid time for presign url
62
+ :return: file's "presign URL"
63
+ """
64
+ raise NotImplementedError
65
+
66
+ @abc.abstractmethod
67
+ async def fetch(self,
68
+ remote_file: RemoteFilePathType) -> bytes:
69
+ """
70
+ Fetch a file from static_dir
71
+ :param remote_file: The file path relative to static_dir
72
+ :return: file's content
73
+ """
74
+ raise NotImplementedError
75
+
76
+ @abc.abstractmethod
77
+ async def upload(self,
78
+ local_file: "LocalFilePathType",
79
+ remote_file: RemoteFilePathType) -> None:
80
+ """
81
+ Move a local picture file to the static_dir.
82
+ :param local_file: The absolute path to the local file or bytes.
83
+ :param remote_file: The file path relative to static_dir
84
+ """
85
+ raise NotImplementedError
86
+
87
+ @abc.abstractmethod
88
+ async def copy(self,
89
+ old_remote_file: RemoteFilePathType,
90
+ new_remote_file: RemoteFilePathType) -> None:
91
+ """
92
+ Copy a file in static_dir.
93
+ :param old_remote_file: The file path relative to static_dir
94
+ :param new_remote_file: The file path relative to static_dir
95
+ """
96
+ raise NotImplementedError
97
+
98
+ @abc.abstractmethod
99
+ async def move(self,
100
+ old_remote_file: RemoteFilePathType,
101
+ new_remote_file: RemoteFilePathType) -> None:
102
+ """
103
+ Move a file in static_dir.
104
+ :param old_remote_file: The file path relative to static_dir
105
+ :param new_remote_file: The file path relative to static_dir
106
+ """
107
+ raise NotImplementedError
108
+
109
+ @abc.abstractmethod
110
+ async def delete(self,
111
+ remote_file: RemoteFilePathType) -> None:
112
+ """
113
+ Move a file in static_dir.
114
+ :param remote_file: The file path relative to static_dir
115
+ """
116
+ raise NotImplementedError
117
+
118
+ @abc.abstractmethod
119
+ async def list_files(self,
120
+ path: RemoteFilePathType,
121
+ pattern: Optional[str] = "*",
122
+ batch_max_files: Optional[int] = None,
123
+ valid_extensions: Optional[set[str]] = None) \
124
+ -> AsyncGenerator[list[RemoteFilePathType], None]:
125
+ """
126
+ Asynchronously generates a list of files from a given base directory path that match a specified pattern and set
127
+ of file extensions.
128
+
129
+ :param path: The relative base directory path from which relative to static_dir to start listing files.
130
+ :param pattern: A glob pattern to filter files based on their names. Defaults to '*' which selects all files.
131
+ :param batch_max_files: The maximum number of files to return. If None, all matching files are returned.
132
+ :param valid_extensions: An optional set of file extensions to include (e.g., {".jpg", ".png"}).
133
+ If None, files are not filtered by extension.
134
+ :return: An asynchronous generator yielding lists of RemoteFilePathType objects representing the matching files.
135
+
136
+ Usage example:
137
+ async for batch in list_files(path=".", pattern="*", batch_max_files=100, valid_extensions={".jpg", ".png"}):
138
+ print(f"Batch: {batch}")
139
+ """
140
+ raise NotImplementedError
141
+
142
+ @abc.abstractmethod
143
+ async def update_metadata(self,
144
+ local_file_metadata: LocalFileMetaDataType,
145
+ remote_file_metadata: RemoteFileMetaDataType) -> None:
146
+ raise NotImplementedError
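
The interface above fixes only the contract; each backend decides how the operations map to its medium. As a minimal, hedged sketch of the intended call-site pattern (the `read_if_present` helper and the example path are illustrative, not part of this commit):

import asyncio

async def read_if_present(storage, remote_file: str) -> bytes | None:
    # `storage` can be any concrete BaseStorage implementation.
    if not await storage.is_exist(remote_file):
        return None
    return await storage.fetch(remote_file)

# e.g. asyncio.run(read_if_present(LocalStorage(), "thumbnails/example.webp"))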
app/Services/storage/disabled_storage.py ADDED
@@ -0,0 +1,43 @@
1
+ from typing import Optional, AsyncGenerator
2
+
3
+ from app.Services.storage import BaseStorage
4
+ from app.Services.storage.base import RemoteFilePathType, LocalFileMetaDataType, RemoteFileMetaDataType, \
5
+ LocalFilePathType
6
+
7
+
8
+ class DisabledStorage(BaseStorage): # pragma: no cover
9
+ async def size(self, remote_file: RemoteFilePathType) -> int:
10
+ raise NotImplementedError
11
+
12
+ async def url(self, remote_file: RemoteFilePathType) -> str:
13
+ raise NotImplementedError
14
+
15
+ async def presign_url(self, remote_file: RemoteFilePathType, expire_second: int = 3600) -> str:
16
+ raise NotImplementedError
17
+
18
+ async def fetch(self, remote_file: RemoteFilePathType) -> bytes:
19
+ raise NotImplementedError
20
+
21
+ async def upload(self, local_file: "LocalFilePathType", remote_file: RemoteFilePathType) -> None:
22
+ raise NotImplementedError
23
+
24
+ async def copy(self, old_remote_file: RemoteFilePathType, new_remote_file: RemoteFilePathType) -> None:
25
+ raise NotImplementedError
26
+
27
+ async def move(self, old_remote_file: RemoteFilePathType, new_remote_file: RemoteFilePathType) -> None:
28
+ raise NotImplementedError
29
+
30
+ async def delete(self, remote_file: RemoteFilePathType) -> None:
31
+ raise NotImplementedError
32
+
33
+ async def update_metadata(self, local_file_metadata: LocalFileMetaDataType,
34
+ remote_file_metadata: RemoteFileMetaDataType) -> None:
35
+ raise NotImplementedError
36
+
37
+ async def list_files(self, path: RemoteFilePathType, pattern: Optional[str] = "*",
38
+ batch_max_files: Optional[int] = None, valid_extensions: Optional[set[str]] = None) -> \
39
+ AsyncGenerator[list[RemoteFilePathType], None]:
40
+ raise NotImplementedError
41
+
42
+ async def is_exist(self, remote_file: RemoteFilePathType) -> bool:
43
+ raise NotImplementedError
app/Services/storage/exception.py ADDED
@@ -0,0 +1,30 @@
1
+ class StorageExtension(Exception):
2
+ pass
3
+
4
+
5
+ class LocalFileNotFoundError(StorageExtension):
6
+ pass
7
+
8
+
9
+ class LocalFileExistsError(StorageExtension):
10
+ pass
11
+
12
+
13
+ class LocalFilePermissionError(StorageExtension):
14
+ pass
15
+
16
+
17
+ class RemoteFileNotFoundError(StorageExtension):
18
+ pass
19
+
20
+
21
+ class RemoteFileExistsError(StorageExtension):
22
+ pass
23
+
24
+
25
+ class RemoteFilePermissionError(StorageExtension):
26
+ pass
27
+
28
+
29
+ class RemoteConnectError(StorageExtension):
30
+ pass
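
Because every backend raises from this single hierarchy, callers can handle storage failures without knowing which storage is active. A hedged sketch of the expected pattern (`safe_fetch` is illustrative, not part of this commit):

from app.Services.storage.exception import RemoteFileNotFoundError, RemoteConnectError

async def safe_fetch(storage, remote_file: str) -> bytes | None:
    try:
        return await storage.fetch(remote_file)
    except RemoteFileNotFoundError:
        return None  # a missing object is an expected condition here
    except RemoteConnectError:
        raise  # connectivity problems should propagate to the caller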
app/Services/storage/local_storage.py ADDED
@@ -0,0 +1,145 @@
1
+ import os
2
+ from asyncio import to_thread
3
+ from pathlib import Path as syncPath
4
+ from shutil import copy2, move
5
+ from typing import Optional, AsyncGenerator
6
+
7
+ import aiofiles
8
+ from loguru import logger
9
+
10
+ from app.Services.storage.base import BaseStorage, RemoteFilePathType, LocalFilePathType
11
+ from app.Services.storage.exception import RemoteFileNotFoundError, LocalFileNotFoundError, RemoteFilePermissionError, \
12
+ LocalFilePermissionError, LocalFileExistsError, RemoteFileExistsError
13
+ from app.config import config
14
+ from app.util.local_file_utility import glob_local_files
15
+
16
+
17
+ def transform_exception(param: str):
18
+ file_not_found_exp_map = {"local": LocalFileNotFoundError, "remote": RemoteFileNotFoundError}
19
+ permission_exp_map = {"remote": RemoteFilePermissionError, "local": LocalFilePermissionError}
20
+ file_exist_map = {"local": LocalFileExistsError, "remote": RemoteFileExistsError}
21
+
22
+ def decorator(func):
23
+ async def wrapper(*args, **kwargs):
24
+ try:
25
+ return await func(*args, **kwargs)
26
+ except FileNotFoundError as ex:
27
+ raise file_not_found_exp_map[param] from ex
28
+ except PermissionError as ex:
29
+ raise permission_exp_map[param] from ex
30
+ except FileExistsError as ex:
31
+ raise file_exist_map[param] from ex
32
+
33
+ return wrapper
34
+
35
+ return decorator
36
+
37
+
38
+ class LocalStorage(BaseStorage[None]):
39
+ def __init__(self):
40
+ super().__init__()
41
+ self.static_dir = syncPath(os.path.abspath(config.storage.local.path))
42
+ self.thumbnails_dir = self.static_dir / "thumbnails"
43
+ self.deleted_dir = self.static_dir / "_deleted"
44
+ self.file_metadata = None
45
+ self.file_path_warp = self.file_path_wrap  # alias for existing callers; "warp" is a historical typo
46
+
47
+ def file_path_wrap(self, path: RemoteFilePathType) -> syncPath:
48
+ return self.static_dir / syncPath(path)
49
+
50
+ async def on_load(self):
51
+ if not self.static_dir.is_dir():
52
+ self.static_dir.mkdir(parents=True)
53
+ logger.warning(f"static_dir {self.static_dir} not found, created.")
54
+ if not self.thumbnails_dir.is_dir():
55
+ self.thumbnails_dir.mkdir(parents=True)
56
+ logger.warning(f"thumbnails_dir {self.thumbnails_dir} not found, created.")
57
+ if not self.deleted_dir.is_dir():
58
+ self.deleted_dir.mkdir(parents=True)
59
+ logger.warning(f"deleted_dir {self.deleted_dir} not found, created.")
60
+
61
+ async def is_exist(self,
62
+ remote_file: "RemoteFilePathType") -> bool:
63
+ return self.file_path_warp(remote_file).exists()
64
+
65
+ @transform_exception("remote")
66
+ async def size(self,
67
+ remote_file: "RemoteFilePathType") -> int:
68
+ _file = self.file_path_warp(remote_file)
69
+ return _file.stat().st_size
70
+
71
+ # noinspection PyMethodMayBeStatic
72
+ async def url(self,
73
+ remote_file: "RemoteFilePathType") -> str:
74
+ return f"/static/{str(remote_file)}"
75
+
76
+ async def presign_url(self,
77
+ remote_file: "RemoteFilePathType",
78
+ expire_second: int = 3600) -> str:
79
+ return f"/static/{str(remote_file)}"
80
+
81
+ @transform_exception("remote")
82
+ async def fetch(self,
83
+ remote_file: "RemoteFilePathType") -> bytes:
84
+ remote_file = self.file_path_warp(remote_file)
85
+ async with aiofiles.open(str(remote_file), 'rb') as file:
86
+ return await file.read()
87
+
88
+ @transform_exception("local")
89
+ async def upload(self,
90
+ local_file: "LocalFilePathType",
91
+ remote_file: "RemoteFilePathType") -> None:
92
+ remote_file = self.file_path_warp(remote_file)
93
+ if isinstance(local_file, bytes):
94
+ async with aiofiles.open(str(remote_file), 'wb') as file:
95
+ await file.write(local_file)
96
+ else:
97
+ await to_thread(copy2, str(local_file), str(remote_file))
98
+ local_file = f"{len(local_file)} bytes" if isinstance(local_file, bytes) else local_file
99
+ logger.success(f"Successfully uploaded file {str(local_file)} to {str(remote_file)} via local_storage.")
100
+
101
+ @transform_exception("remote")
102
+ async def copy(self,
103
+ old_remote_file: "RemoteFilePathType",
104
+ new_remote_file: "RemoteFilePathType") -> None:
105
+ old_remote_file = self.file_path_warp(old_remote_file)
106
+ new_remote_file = self.file_path_warp(new_remote_file)
107
+ await to_thread(copy2, str(old_remote_file), str(new_remote_file))
108
+ logger.success(f"Successfully copied file {str(old_remote_file)} to {str(new_remote_file)} via local_storage.")
109
+
110
+ @transform_exception("remote")
111
+ async def move(self,
112
+ old_remote_file: "RemoteFilePathType",
113
+ new_remote_file: "RemoteFilePathType") -> None:
114
+ old_remote_file = self.file_path_warp(old_remote_file)
115
+ new_remote_file = self.file_path_warp(new_remote_file)
116
+ await to_thread(move, str(old_remote_file), str(new_remote_file), copy_function=copy2)
117
+ logger.success(f"Successfully moved file {str(old_remote_file)} to {str(new_remote_file)} via local_storage.")
118
+
119
+ @transform_exception("remote")
120
+ async def delete(self,
121
+ remote_file: "RemoteFilePathType") -> None:
122
+ remote_file = self.file_path_warp(remote_file)
123
+ await to_thread(os.remove, str(remote_file))
124
+ logger.success(f"Successfully deleted file {str(remote_file)} via local_storage.")
125
+
126
+ async def list_files(self,
127
+ path: RemoteFilePathType,
128
+ pattern: Optional[str] = "*",
129
+ batch_max_files: Optional[int] = None,
130
+ valid_extensions: Optional[set[str]] = None) \
131
+ -> AsyncGenerator[list[RemoteFilePathType], None]:
132
+ local_path = self.file_path_warp(path)
133
+ files = []
134
+ for file in glob_local_files(local_path, pattern, valid_extensions):
135
+ files.append(file)
136
+ if batch_max_files is not None and len(files) == batch_max_files:
137
+ yield files
138
+ files = []
139
+ if files:
140
+ yield files
141
+
142
+ async def update_metadata(self,
143
+ local_file_metadata: None,
144
+ remote_file_metadata: None) -> None:
145
+ raise NotImplementedError
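
`transform_exception` remaps the builtin `FileNotFoundError` / `PermissionError` / `FileExistsError` raised by the filesystem into the storage-specific hierarchy above. A hedged demonstration (assumes `config.storage.local.path` points at a writable directory; the file name is illustrative):

import asyncio

from app.Services.storage.local_storage import LocalStorage
from app.Services.storage.exception import RemoteFileNotFoundError

async def demo() -> None:
    storage = LocalStorage()
    await storage.on_load()  # creates static_dir, thumbnails and _deleted if missing
    try:
        await storage.fetch("does-not-exist.png")
    except RemoteFileNotFoundError:
        # The FileNotFoundError raised inside fetch() was remapped by transform_exception("remote").
        print("missing file surfaced as RemoteFileNotFoundError")

# asyncio.run(demo())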
app/Services/storage/s3_compatible_storage.py ADDED
@@ -0,0 +1,173 @@
1
+ # pylint is currently reporting `opendal` as a `no-name-in-module` error, so we disable it as a temporary workaround
2
+ # Related issue: https://github.com/pylint-dev/pylint/issues/9185
3
+ # Remove the `# pylint` directive below once the issue is resolved
4
+ # pylint: disable=import-error,no-name-in-module
5
+ import os
6
+ import urllib.parse
7
+ from pathlib import PurePosixPath
8
+ from typing import Optional, AsyncGenerator
9
+
10
+ import aiofiles
11
+ from loguru import logger
12
+ from opendal import AsyncOperator
13
+ from opendal.exceptions import NotFound, PermissionDenied, AlreadyExists
14
+ from wcmatch import glob
15
+
16
+ from app.Services.storage.base import BaseStorage, RemoteFilePathType, LocalFilePathType, \
17
+ LocalFileMetaDataType, RemoteFileMetaDataType
18
+ from app.Services.storage.exception import LocalFileNotFoundError, RemoteFileNotFoundError, RemoteFilePermissionError, \
19
+ RemoteFileExistsError
20
+ from app.config import config
21
+ from app.util.local_file_utility import VALID_IMAGE_EXTENSIONS
22
+
23
+
24
+ def transform_exception(func):
25
+ async def wrapper(*args, **kwargs):
26
+ try:
27
+ return await func(*args, **kwargs)
28
+ except FileNotFoundError as ex:
29
+ raise LocalFileNotFoundError from ex
30
+ except NotFound as ex:
31
+ raise RemoteFileNotFoundError from ex
32
+ except PermissionDenied as ex:
33
+ raise RemoteFilePermissionError from ex
34
+ except AlreadyExists as ex:
35
+ raise RemoteFileExistsError from ex
36
+
37
+ return wrapper
38
+
39
+
40
+ class S3Storage(BaseStorage[None]):
41
+ def __init__(self):
42
+ super().__init__()
43
+
44
+ # Paths
45
+ self.static_dir = PurePosixPath(config.storage.s3.path)
46
+ self.thumbnails_dir = self.static_dir / "thumbnails"
47
+ self.deleted_dir = self.static_dir / "_deleted"
48
+
49
+ self.file_metadata = None
50
+ self.bucket = config.storage.s3.bucket
51
+ self.region = config.storage.s3.region
52
+ self.endpoint = config.storage.s3.endpoint_url
53
+
54
+ self.op = AsyncOperator("s3",
55
+ root=str(self.static_dir),
56
+ bucket=self.bucket,
57
+ region=self.region,
58
+ endpoint=self.endpoint,
59
+ access_key_id=config.storage.s3.access_key_id,
60
+ secret_access_key=config.storage.s3.secret_access_key)
61
+
62
+ self._file_path_str_warp = self._file_path_str_wrap  # alias for existing callers; "warp" is a historical typo
63
+
64
+ @staticmethod
65
+ def _file_path_str_wrap(p: RemoteFilePathType):
66
+ return str(PurePosixPath(p))
67
+
68
+ async def is_exist(self,
69
+ remote_file: "RemoteFilePathType") -> bool:
70
+ try:
71
+ # the easiest way to confirm the existence of a file
72
+ await self.op.stat(self._file_path_str_warp(remote_file))
73
+ return True
74
+ except NotFound:
75
+ return False
76
+
77
+ @transform_exception
78
+ async def size(self,
79
+ remote_file: "RemoteFilePathType") -> int:
80
+ _stat = await self.op.stat(self._file_path_str_warp(remote_file))
81
+ return _stat.content_length
82
+
83
+ @transform_exception
84
+ async def url(self,
85
+ remote_file: "RemoteFilePathType") -> str:
86
+ return f"{self._res_endpoint}/{str(self.static_dir)}/{str(remote_file)}"
87
+
88
+ @transform_exception
89
+ async def presign_url(self,
90
+ remote_file: "RemoteFilePathType",
91
+ expire_second: int = 3600) -> str:
92
+ _presign = await self.op.presign_read(self._file_path_str_warp(remote_file), expire_second)
93
+ return _presign.url
94
+
95
+ @transform_exception
96
+ async def fetch(self,
97
+ remote_file: "RemoteFilePathType") -> bytes:
98
+ with await self.op.read(self._file_path_str_warp(remote_file)) as f:
99
+ return bytes(f)
100
+
101
+ @transform_exception
102
+ async def upload(self,
103
+ local_file: "LocalFilePathType",
104
+ remote_file: "RemoteFilePathType") -> None:
105
+ if isinstance(local_file, bytes):
106
+ b = local_file
107
+ else:
108
+ async with aiofiles.open(local_file, "rb") as f:
109
+ b = await f.read()
110
+ await self.op.write(self._file_path_str_warp(remote_file), b)
111
+ local_file = f"{len(local_file)} bytes" if isinstance(local_file, bytes) else local_file
112
+ logger.success(f"Successfully uploaded file {str(local_file)} to {str(remote_file)} via s3_storage.")
113
+
114
+ @transform_exception
115
+ async def copy(self,
116
+ old_remote_file: "RemoteFilePathType",
117
+ new_remote_file: "RemoteFilePathType") -> None:
118
+ await self.op.copy(self._file_path_str_warp(old_remote_file), self._file_path_str_warp(new_remote_file))
119
+ logger.success(f"Successfully copied file {str(old_remote_file)} to {str(new_remote_file)} via s3_storage.")
120
+
121
+ @transform_exception
122
+ async def move(self,
123
+ old_remote_file: "RemoteFilePathType",
124
+ new_remote_file: "RemoteFilePathType") -> None:
125
+ await self.op.copy(self._file_path_str_warp(old_remote_file), self._file_path_str_warp(new_remote_file))
126
+ await self.op.delete(self._file_path_str_warp(old_remote_file))
127
+ logger.success(f"Successfully moved file {str(old_remote_file)} to {str(new_remote_file)} via s3_storage.")
128
+
129
+ @transform_exception
130
+ async def delete(self,
131
+ remote_file: "RemoteFilePathType") -> None:
132
+ await self.op.delete(self._file_path_str_warp(remote_file))
133
+ logger.success(f"Successfully deleted file {str(remote_file)} via s3_storage.")
134
+
135
+ async def list_files(self,
136
+ path: RemoteFilePathType,
137
+ pattern: Optional[str] = "*",
138
+ batch_max_files: Optional[int] = None,
139
+ valid_extensions: Optional[set[str]] = None) \
140
+ -> AsyncGenerator[list[RemoteFilePathType], None]:
141
+ if valid_extensions is None:
142
+ valid_extensions = VALID_IMAGE_EXTENSIONS
143
+ files = []
144
+ # In opendal, current path should be "" instead of "."
145
+ _path = "" if self._file_path_str_warp(path) == "." else self._file_path_str_warp(path)
146
+ async for itm in await self.op.scan(_path):
147
+ if self._list_files_check(itm.path, pattern, valid_extensions):
148
+ files.append(PurePosixPath(itm.path))
149
+ if batch_max_files is not None and len(files) == batch_max_files:
150
+ yield files
151
+ files = []
152
+ if files:
153
+ yield files
154
+
155
+ async def update_metadata(self,
156
+ local_file_metadata: "LocalFileMetaDataType",
157
+ remote_file_metadata: "RemoteFileMetaDataType") -> None:
158
+ raise NotImplementedError
159
+
160
+ @staticmethod
161
+ def _list_files_check(x: str, pattern: str, valid_extensions: Optional[set[str]] = None) -> bool:
162
+ matches_pattern = glob.globmatch(x, pattern, flags=glob.GLOBSTAR)
163
+ has_valid_extension = os.path.splitext(x)[-1] in valid_extensions
164
+ is_not_directory = not x.endswith("/")
165
+ return matches_pattern and has_valid_extension and is_not_directory
166
+
167
+ @property
168
+ def _res_endpoint(self):
169
+ parsed_url = urllib.parse.urlparse(self.endpoint)
170
+ # If the endpoint is a subdomain of the bucket, then the endpoint is already resolved.
171
+ if self.bucket in parsed_url.netloc.split('.'):
172
+ return self.endpoint
173
+ return f"{self.endpoint}/{self.bucket}"
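
`_res_endpoint` distinguishes virtual-hosted-style endpoints, where the bucket is already a label of the hostname, from path-style endpoints, where the bucket must be appended as a path segment. The same check in a hedged, standalone form (the URLs are illustrative):

import urllib.parse

def resolve_endpoint(endpoint: str, bucket: str) -> str:
    parsed = urllib.parse.urlparse(endpoint)
    if bucket in parsed.netloc.split('.'):
        return endpoint  # virtual-hosted style: bucket already in the hostname
    return f"{endpoint}/{bucket}"  # path style: append the bucket

assert resolve_endpoint("https://my-bucket.s3.amazonaws.com", "my-bucket") == "https://my-bucket.s3.amazonaws.com"
assert resolve_endpoint("https://s3.amazonaws.com", "my-bucket") == "https://s3.amazonaws.com/my-bucket"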
app/Services/transformers_service.py ADDED
@@ -0,0 +1,70 @@
1
+ from time import time
2
+
3
+ import numpy as np
4
+ import torch
5
+ from PIL import Image
6
+ from loguru import logger
7
+ from numpy import ndarray
8
+ from torch import FloatTensor, no_grad
9
+ from transformers import CLIPProcessor, CLIPModel, BertTokenizer, BertModel
10
+
11
+ from app.Services.lifespan_service import LifespanService
12
+ from app.config import config
13
+
14
+
15
+ class TransformersService(LifespanService):
16
+ def __init__(self):
17
+ self.device = config.device
18
+ if self.device == "auto":
19
+ self.device = "cuda" if torch.cuda.is_available() else "cpu"
20
+ logger.info("Using device: {}; CLIP Model: {}, BERT Model: {}",
21
+ self.device, config.model.clip, config.model.bert)
22
+ self._clip_model = CLIPModel.from_pretrained(config.model.clip).to(self.device)
23
+ self._clip_processor = CLIPProcessor.from_pretrained(config.model.clip)
24
+ logger.success("CLIP Model loaded successfully")
25
+ if config.ocr_search.enable:
26
+ self._bert_model = BertModel.from_pretrained(config.model.bert).to(self.device)
27
+ self._bert_tokenizer = BertTokenizer.from_pretrained(config.model.bert)
28
+ logger.success("BERT Model loaded successfully")
29
+ else:
30
+ logger.info("OCR search is disabled. Skipping BERT model loading.")
31
+
32
+ @no_grad()
33
+ def get_image_vector(self, image: Image.Image) -> ndarray:
34
+ if image.mode != "RGB":
35
+ image = image.convert("RGB")
36
+ logger.info("Processing image...")
37
+ start_time = time()
38
+ inputs = self._clip_processor(images=image, return_tensors="pt").to(self.device)
39
+ logger.success("Image processed, now Inferring with CLIP model...")
40
+ outputs: FloatTensor = self._clip_model.get_image_features(**inputs)
41
+ logger.success("Inference done. Time elapsed: {:.2f}s", time() - start_time)
42
+ outputs /= outputs.norm(dim=-1, keepdim=True)
43
+ return outputs.numpy(force=True).reshape(-1)
44
+
45
+ @no_grad()
46
+ def get_text_vector(self, text: str) -> ndarray:
47
+ logger.info("Processing text...")
48
+ start_time = time()
49
+ inputs = self._clip_processor(text=text, return_tensors="pt").to(self.device)
50
+ logger.success("Text processed, now Inferring with CLIP model...")
51
+ outputs: FloatTensor = self._clip_model.get_text_features(**inputs)
52
+ logger.success("Inference done. Time elapsed: {:.2f}s", time() - start_time)
53
+ outputs /= outputs.norm(dim=-1, keepdim=True)
54
+ return outputs.numpy(force=True).reshape(-1)
55
+
56
+ @no_grad()
57
+ def get_bert_vector(self, text: str) -> ndarray:
58
+ start_time = time()
59
+ logger.info("Inferring with BERT model...")
60
+ inputs = self._bert_tokenizer(text.strip().lower(), return_tensors="pt", truncation=True).to(self.device)
61
+ outputs = self._bert_model(**inputs)
62
+ vector = outputs.last_hidden_state.mean(dim=1).squeeze()
63
+ logger.success("BERT inference done. Time elapsed: {:.2f}s", time() - start_time)
64
+ return vector.cpu().numpy()
65
+
66
+ @staticmethod
67
+ def get_random_vector(seed: int | None = None) -> ndarray:
68
+ generator = np.random.default_rng(seed)
69
+ vec = generator.uniform(-1, 1, 768)
70
+ return vec
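
Both `get_image_vector` and `get_text_vector` L2-normalise their outputs, so the cosine similarity between an image and a text prompt reduces to a plain dot product. A hedged usage sketch (the image path and prompt are illustrative):

import numpy as np
from numpy import ndarray

def cosine_similarity(a: ndarray, b: ndarray) -> float:
    # Both inputs are unit-length, so the dot product equals the cosine similarity.
    return float(np.dot(a, b))

# service = TransformersService()
# score = cosine_similarity(service.get_image_vector(Image.open("example.jpg")),
#                           service.get_text_vector("a cat sitting on a keyboard"))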
app/Services/upload_service.py ADDED
@@ -0,0 +1,108 @@
1
+ import asyncio
2
+ import gc
3
+ import io
4
+ import pathlib
5
+ from io import BytesIO
6
+
7
+ from PIL import Image
8
+ from loguru import logger
9
+
10
+ from app.Models.api_models.admin_query_params import UploadImageThumbnailMode
11
+ from app.Models.errors import PointDuplicateError
12
+ from app.Models.img_data import ImageData
13
+ from app.Services.index_service import IndexService
14
+ from app.Services.lifespan_service import LifespanService
15
+ from app.Services.storage import StorageService
16
+ from app.Services.vector_db_context import VectorDbContext
17
+ from app.config import config
18
+ from app.util.generate_uuid import generate_uuid
19
+
20
+
21
+ class UploadService(LifespanService):
22
+ def __init__(self, storage_service: StorageService, db_context: VectorDbContext, index_service: IndexService):
23
+ self._storage_service = storage_service
24
+ self._db_context = db_context
25
+ self._index_service = index_service
26
+
27
+ self._queue = asyncio.Queue(config.admin_index_queue_max_length)
28
+ self._upload_worker_task = asyncio.create_task(self._upload_worker())
29
+
30
+ self.uploading_ids = set()
31
+ self._processed_count = 0
32
+
33
+ async def _upload_worker(self):
34
+ while True:
35
+ img_data, *args = await self._queue.get()
36
+ try:
37
+ await self._upload_task(img_data, *args)
38
+ logger.success("Image {} uploaded and indexed. Queue Length: {} [-1]", img_data.id, self._queue.qsize())
39
+ except Exception as ex:
40
+ logger.error("Error occurred while uploading image {}", img_data.id)
41
+ logger.exception(ex)
42
+ finally:
43
+ self._queue.task_done()
44
+ self.uploading_ids.remove(img_data.id)
45
+ self._processed_count += 1
46
+ if self._processed_count % 50 == 0:
47
+ gc.collect()
48
+
49
+ async def _upload_task(self, img_data: ImageData, img_bytes: bytes, skip_ocr: bool,
50
+ thumbnail_mode: UploadImageThumbnailMode):
51
+ img = Image.open(BytesIO(img_bytes))
52
+ logger.info('Start indexing image {}. Local: {}. Size: {}', img_data.id, img_data.local, len(img_bytes))
53
+ file_name = f"{img_data.id}.{img_data.format}"
54
+ thumb_path = f"thumbnails/{img_data.id}.webp"
55
+ gen_thumb = thumbnail_mode == UploadImageThumbnailMode.ALWAYS or (
56
+ thumbnail_mode == UploadImageThumbnailMode.IF_NECESSARY and len(img_bytes) > 1024 * 500)
57
+
58
+ if img_data.local:
59
+ img_data.url = await self._storage_service.active_storage.url(file_name)
60
+ if gen_thumb:
61
+ img_data.thumbnail_url = await self._storage_service.active_storage.url(
62
+ f"thumbnails/{img_data.id}.webp")
63
+ img_data.local_thumbnail = True
64
+
65
+ await self._index_service.index_image(img, img_data, skip_ocr=skip_ocr, background=True)
66
+ logger.success("Image {} indexed.", img_data.id)
67
+
68
+ if img_data.local:
69
+ logger.info("Start uploading image {} to local storage.", img_data.id)
70
+ await self._storage_service.active_storage.upload(img_bytes, file_name)
71
+ logger.success("Image {} uploaded to local storage.", img_data.id)
72
+ if gen_thumb:
73
+ logger.info("Start generate and upload thumbnail for {}.", img_data.id)
74
+ img.thumbnail((256, 256), resample=Image.Resampling.LANCZOS)
75
+ img_byte_arr = BytesIO()
76
+ img.save(img_byte_arr, 'WebP', save_all=True)
77
+ await self._storage_service.active_storage.upload(img_byte_arr.getvalue(), thumb_path)
78
+ logger.success("Thumbnail for {} generated and uploaded!", img_data.id)
79
+
80
+ img.close()
81
+
82
+ async def queue_upload_image(self, img_data: ImageData, img_bytes: bytes, skip_ocr: bool,
83
+ thumbnail_mode: UploadImageThumbnailMode):
84
+ self.uploading_ids.add(img_data.id)
85
+ await self._queue.put((img_data, img_bytes, skip_ocr, thumbnail_mode))
86
+ logger.success("Image {} added to upload queue. Queue Length: {} [+1]", img_data.id, self._queue.qsize())
87
+
88
+ async def assign_image_id(self, img_file: pathlib.Path | io.BytesIO | bytes):
89
+ img_id = generate_uuid(img_file)
90
+ # check for duplicate points
91
+ if img_id in self.uploading_ids or len(await self._db_context.validate_ids([str(img_id)])) != 0:
92
+ logger.warning("Duplicate upload request for image id: {}", img_id)
93
+ raise PointDuplicateError(f"The uploaded point is already contained in the database! entity id: {img_id}",
94
+ img_id)
95
+ return img_id
96
+
97
+ async def sync_upload_image(self, img_data: ImageData, img_bytes: bytes, skip_ocr: bool,
98
+ thumbnail_mode: UploadImageThumbnailMode):
99
+ await self._upload_task(img_data, img_bytes, skip_ocr, thumbnail_mode)
100
+
101
+ def get_queue_size(self):
102
+ return self._queue.qsize()
103
+
104
+ async def on_exit(self):  # pragma: no cover - hard to cover in unit tests
105
+ if self.get_queue_size() != 0:
106
+ logger.warning("There are still {} images in the upload queue. Waiting for upload process to be completed.",
107
+ self.get_queue_size())
108
+ await self._queue.join()
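
The intended flow is: deduplicate with `assign_image_id`, then hand the bytes to the bounded queue with `queue_upload_image`; the single `_upload_worker` drains the queue in the background. A hedged sketch of a caller (`build_image_data` is a hypothetical ImageData factory, not part of this commit):

from app.Models.api_models.admin_query_params import UploadImageThumbnailMode
from app.Models.errors import PointDuplicateError

async def enqueue(upload_service, img_bytes: bytes, build_image_data) -> None:
    try:
        img_id = await upload_service.assign_image_id(img_bytes)
    except PointDuplicateError:
        return  # already indexed, or currently sitting in the queue
    img_data = build_image_data(img_id)  # hypothetical helper
    await upload_service.queue_upload_image(img_data, img_bytes, skip_ocr=False,
                                            thumbnail_mode=UploadImageThumbnailMode.IF_NECESSARY)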
app/Services/vector_db_context.py ADDED
@@ -0,0 +1,334 @@
1
+ from typing import Optional
2
+
3
+ import numpy
4
+ from grpc.aio import AioRpcError
5
+ from httpx import HTTPError
6
+ from loguru import logger
7
+ from qdrant_client import AsyncQdrantClient
8
+ from qdrant_client.http import models
9
+ from qdrant_client.models import RecommendStrategy
10
+
11
+ from app.Models.api_models.search_api_model import SearchModelEnum, SearchBasisEnum
12
+ from app.Models.img_data import ImageData
13
+ from app.Models.query_params import FilterParams
14
+ from app.Models.search_result import SearchResult
15
+ from app.Services.lifespan_service import LifespanService
16
+ from app.config import config, QdrantMode
17
+ from app.util.retry_deco_async import wrap_object, retry_async
18
+
19
+
20
+ class PointNotFoundError(ValueError):
21
+ def __init__(self, point_id: str):
22
+ self.point_id = point_id
23
+ super().__init__(f"Point {point_id} not found.")
24
+
25
+
26
+ class VectorDbContext(LifespanService):
27
+ IMG_VECTOR = "image_vector"
28
+ TEXT_VECTOR = "text_contain_vector"
29
+ AVAILABLE_POINT_TYPES = models.Record | models.ScoredPoint | models.PointStruct
30
+
31
+ def __init__(self):
32
+ match config.qdrant.mode:
33
+ case QdrantMode.SERVER:
34
+ self._client = AsyncQdrantClient(host=config.qdrant.host, port=config.qdrant.port,
35
+ grpc_port=config.qdrant.grpc_port, api_key=config.qdrant.api_key,
36
+ prefer_grpc=config.qdrant.prefer_grpc)
37
+ wrap_object(self._client, retry_async((AioRpcError, HTTPError)))
38
+ case QdrantMode.LOCAL:
39
+ self._client = AsyncQdrantClient(path=config.qdrant.local_path)
40
+ case QdrantMode.MEMORY:
41
+ logger.warning("Using in-memory Qdrant client. Data will be lost after application restart. "
42
+ "This should only be used for testing and debugging.")
43
+ self._client = AsyncQdrantClient(":memory:")
44
+ case _:
45
+ raise ValueError("Invalid Qdrant mode.")
46
+ self.collection_name = config.qdrant.coll
47
+
48
+ async def on_load(self):
49
+ if not await self.check_collection():
50
+ logger.warning("Collection not found. Initializing...")
51
+ await self.initialize_collection()
52
+
53
+ async def retrieve_by_id(self, image_id: str, with_vectors=False) -> ImageData:
54
+ """
55
+ Retrieve an item from database by id. Will raise PointNotFoundError if the given ID doesn't exist.
56
+ :param image_id: The ID to retrieve.
57
+ :param with_vectors: Whether to retrieve vectors.
58
+ :return: The retrieved item.
59
+ """
60
+ logger.info("Retrieving item {} from database...", image_id)
61
+ result = await self._client.retrieve(collection_name=self.collection_name,
62
+ ids=[image_id],
63
+ with_payload=True,
64
+ with_vectors=with_vectors)
65
+ if len(result) != 1:
66
+ logger.error("Point not exist.")
67
+ raise PointNotFoundError(image_id)
68
+ return self._get_img_data_from_point(result[0])
69
+
70
+ async def retrieve_by_ids(self, image_id: list[str], with_vectors=False) -> list[ImageData]:
71
+ """
72
+ Retrieve items from the database by IDs.
73
+ An exception is thrown if there are items in the IDs that do not exist in the database.
74
+ :param image_id: The list of IDs to retrieve.
75
+ :param with_vectors: Whether to retrieve vectors.
76
+ :return: The list of retrieved items.
77
+ """
78
+ logger.info("Retrieving {} items from database...", len(image_id))
79
+ result = await self._client.retrieve(collection_name=self.collection_name,
80
+ ids=image_id,
81
+ with_payload=True,
82
+ with_vectors=with_vectors)
83
+ result_point_ids = {t.id for t in result}
84
+ missing_point_ids = set(image_id) - result_point_ids
85
+ if len(missing_point_ids) > 0:
86
+ logger.error("{} points not exist.", len(missing_point_ids))
87
+ raise PointNotFoundError(str(missing_point_ids))
88
+ return self._get_img_data_from_points(result)
89
+
90
+ async def validate_ids(self, image_id: list[str]) -> list[str]:
91
+ """
92
+ Validate a list of IDs. Will return a list of valid IDs.
93
+ :param image_id: The list of IDs to validate.
94
+ :return: The list of valid IDs.
95
+ """
96
+ logger.info("Validating {} items from database...", len(image_id))
97
+ result = await self._client.retrieve(collection_name=self.collection_name,
98
+ ids=image_id,
99
+ with_payload=False,
100
+ with_vectors=False)
101
+ return [t.id for t in result]
102
+
103
+ async def querySearch(self, query_vector, query_vector_name: str = IMG_VECTOR,
104
+ top_k=10, skip=0, filter_param: FilterParams | None = None) -> list[SearchResult]:
105
+ logger.info("Querying Qdrant... top_k = {}", top_k)
106
+ result = await self._client.search(collection_name=self.collection_name,
107
+ query_vector=(query_vector_name, query_vector),
108
+ query_filter=self._get_filters_by_filter_param(filter_param),
109
+ limit=top_k,
110
+ offset=skip,
111
+ with_payload=True)
112
+ logger.success("Query completed!")
113
+ return [self._get_search_result_from_scored_point(t) for t in result]
114
+
115
+ async def querySimilar(self,
116
+ query_vector_name: str = IMG_VECTOR,
117
+ search_id: Optional[str] = None,
118
+ positive_vectors: Optional[list[numpy.ndarray]] = None,
119
+ negative_vectors: Optional[list[numpy.ndarray]] = None,
120
+ mode: Optional[SearchModelEnum] = None,
121
+ with_vectors: bool = False,
122
+ filter_param: FilterParams | None = None,
123
+ top_k: int = 10,
124
+ skip: int = 0) -> list[SearchResult]:
125
+ _positive_vectors = [t.tolist() for t in positive_vectors] if positive_vectors is not None else [search_id]
126
+ _negative_vectors = [t.tolist() for t in negative_vectors] if negative_vectors is not None else None
127
+ _strategy = None if mode is None else (RecommendStrategy.AVERAGE_VECTOR if
128
+ mode == SearchModelEnum.average else RecommendStrategy.BEST_SCORE)
129
+ # Since only combined search needs the vectors returned, _combined_search_need_vectors can be defined as below
130
+ _combined_search_need_vectors = [
131
+ self.IMG_VECTOR if query_vector_name == self.TEXT_VECTOR else self.TEXT_VECTOR] if with_vectors else None
132
+ logger.info("Querying Qdrant... top_k = {}", top_k)
133
+ result = await self._client.recommend(collection_name=self.collection_name,
134
+ using=query_vector_name,
135
+ positive=_positive_vectors,
136
+ negative=_negative_vectors,
137
+ strategy=_strategy,
138
+ with_vectors=_combined_search_need_vectors,
139
+ query_filter=self._get_filters_by_filter_param(filter_param),
140
+ limit=top_k,
141
+ offset=skip,
142
+ with_payload=True)
143
+ logger.success("Query completed!")
144
+
145
+ return [self._get_search_result_from_scored_point(t) for t in result]
146
+
147
+ async def insertItems(self, items: list[ImageData]):
148
+ logger.info("Inserting {} items into Qdrant...", len(items))
149
+
150
+ points = [self._get_point_from_img_data(t) for t in items]
151
+
152
+ response = await self._client.upsert(collection_name=self.collection_name,
153
+ wait=True,
154
+ points=points)
155
+ logger.success("Insert completed! Status: {}", response.status)
156
+
157
+ async def deleteItems(self, ids: list[str]):
158
+ logger.info("Deleting {} items from Qdrant...", len(ids))
159
+ response = await self._client.delete(collection_name=self.collection_name,
160
+ points_selector=models.PointIdsList(
161
+ points=ids
162
+ ),
163
+ )
164
+ logger.success("Delete completed! Status: {}", response.status)
165
+
166
+ async def updatePayload(self, new_data: ImageData):
167
+ """
168
+ Update the payload of an existing item in the database.
169
+ Warning: This method will not update the vector of the item.
170
+ :param new_data: The new data to update.
171
+ """
172
+ response = await self._client.set_payload(collection_name=self.collection_name,
173
+ payload=new_data.payload,
174
+ points=[str(new_data.id)],
175
+ wait=True)
176
+ logger.success("Update completed! Status: {}", response.status)
177
+
178
+ async def updateVectors(self, new_points: list[ImageData]):
179
+ resp = await self._client.update_vectors(collection_name=self.collection_name,
180
+ points=[self._get_vector_from_img_data(t) for t in new_points],
181
+ )
182
+ logger.success("Update vectors completed! Status: {}", resp.status)
183
+
184
+ async def scroll_points(self,
185
+ from_id: str | None = None,
186
+ count=50,
187
+ with_vectors=False,
188
+ filter_param: FilterParams | None = None,
189
+ ) -> tuple[list[ImageData], str]:
190
+ resp, next_id = await self._client.scroll(collection_name=self.collection_name,
191
+ limit=count,
192
+ offset=from_id,
193
+ with_vectors=with_vectors,
194
+ scroll_filter=self._get_filters_by_filter_param(filter_param)
195
+ )
196
+
197
+ return [self._get_img_data_from_point(t) for t in resp], next_id
198
+
199
+ async def get_counts(self, exact: bool) -> int:
200
+ resp = await self._client.count(collection_name=self.collection_name, exact=exact)
201
+ return resp.count
202
+
203
+ async def check_collection(self) -> bool:
204
+ resp = await self._client.get_collections()
205
+ resp = [t.name for t in resp.collections]
206
+ return self.collection_name in resp
207
+
208
+ async def initialize_collection(self):
209
+ if await self.check_collection():
210
+ logger.warning("Collection already exists. Skip initialization.")
211
+ return
212
+ logger.info("Initializing database, collection name: {}", self.collection_name)
213
+ vectors_config = {
214
+ self.IMG_VECTOR: models.VectorParams(size=768, distance=models.Distance.COSINE),
215
+ self.TEXT_VECTOR: models.VectorParams(size=768, distance=models.Distance.COSINE)
216
+ }
217
+ await self._client.create_collection(collection_name=self.collection_name,
218
+ vectors_config=vectors_config)
219
+ logger.success("Collection created!")
220
+
221
+ @classmethod
222
+ def _get_vector_from_img_data(cls, img_data: ImageData) -> models.PointVectors:
223
+ vector = {}
224
+ if img_data.image_vector is not None:
225
+ vector[cls.IMG_VECTOR] = img_data.image_vector.tolist()
226
+ if img_data.text_contain_vector is not None:
227
+ vector[cls.TEXT_VECTOR] = img_data.text_contain_vector.tolist()
228
+ return models.PointVectors(
229
+ id=str(img_data.id),
230
+ vector=vector
231
+ )
232
+
233
+ @classmethod
234
+ def _get_point_from_img_data(cls, img_data: ImageData) -> models.PointStruct:
235
+ return models.PointStruct(
236
+ id=str(img_data.id),
237
+ payload=img_data.payload,
238
+ vector=cls._get_vector_from_img_data(img_data).vector
239
+ )
240
+
241
+ def _get_img_data_from_point(self, point: AVAILABLE_POINT_TYPES) -> ImageData:
242
+ return (ImageData
243
+ .from_payload(point.id,
244
+ point.payload,
245
+ image_vector=numpy.array(point.vector[self.IMG_VECTOR], dtype=numpy.float32)
246
+ if point.vector and self.IMG_VECTOR in point.vector else None,
247
+ text_contain_vector=numpy.array(point.vector[self.TEXT_VECTOR], dtype=numpy.float32)
248
+ if point.vector and self.TEXT_VECTOR in point.vector else None
249
+ ))
250
+
251
+ def _get_img_data_from_points(self, points: list[AVAILABLE_POINT_TYPES]) -> list[ImageData]:
252
+ return [self._get_img_data_from_point(t) for t in points]
253
+
254
+ def _get_search_result_from_scored_point(self, point: models.ScoredPoint) -> SearchResult:
255
+ return SearchResult(img=self._get_img_data_from_point(point), score=point.score)
256
+
257
+ @classmethod
258
+ def vector_name_for_basis(cls, basis: SearchBasisEnum) -> str:
259
+ match basis:
260
+ case SearchBasisEnum.vision:
261
+ return cls.IMG_VECTOR
262
+ case SearchBasisEnum.ocr:
263
+ return cls.TEXT_VECTOR
264
+ case _:
265
+ raise ValueError("Invalid basis")
266
+
267
+ @staticmethod
268
+ def _get_filters_by_filter_param(filter_param: FilterParams | None) -> models.Filter | None:
269
+ if filter_param is None:
270
+ return None
271
+
272
+ filters = []
273
+ neg_filter = []
274
+ if filter_param.min_width is not None and filter_param.min_width > 0:
275
+ filters.append(models.FieldCondition(
276
+ key="width",
277
+ range=models.Range(
278
+ gte=filter_param.min_width
279
+ )
280
+ ))
281
+
282
+ if filter_param.min_height is not None and filter_param.min_height > 0:
283
+ filters.append(models.FieldCondition(
284
+ key="height",
285
+ range=models.Range(
286
+ gte=filter_param.min_height
287
+ )
288
+ ))
289
+
290
+ if filter_param.min_ratio is not None:
291
+ filters.append(models.FieldCondition(
292
+ key="aspect_ratio",
293
+ range=models.Range(
294
+ gte=filter_param.min_ratio,
295
+ lte=filter_param.max_ratio
296
+ )
297
+ ))
298
+
299
+ if filter_param.starred is not None:
300
+ filters.append(models.FieldCondition(
301
+ key="starred",
302
+ match=models.MatchValue(
303
+ value=filter_param.starred
304
+ )
305
+ ))
306
+
307
+ if filter_param.ocr_text is not None:
308
+ filters.append(models.FieldCondition(
309
+ key="ocr_text_lower",
310
+ match=models.MatchText(
311
+ text=filter_param.ocr_text.lower()
312
+ )
313
+ ))
314
+
315
+ if filter_param.categories is not None:
316
+ filters.append(models.FieldCondition(
317
+ key="categories",
318
+ match=models.MatchAny(
319
+ any=filter_param.categories
320
+ )
321
+ ))
322
+
323
+ if filter_param.categories_negative is not None:
324
+ neg_filter.append(models.FieldCondition(
325
+ key="categories",
326
+ match=models.MatchAny(any=filter_param.categories_negative),
327
+ ))
328
+
329
+ if not filters and not neg_filter:
330
+ return None
331
+ return models.Filter(
332
+ must=filters,
333
+ must_not=neg_filter
334
+ )
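
`_get_filters_by_filter_param` translates a `FilterParams` object into a qdrant `models.Filter`, so every search entry point (`querySearch`, `querySimilar`, `scroll_points`) shares the same filtering semantics. A hedged usage sketch, assuming `FilterParams` accepts the attributes read above as keyword arguments:

from app.Models.query_params import FilterParams

async def search_starred(db: VectorDbContext, transformers_service, prompt: str):
    query_vector = transformers_service.get_text_vector(prompt)  # CLIP text embedding
    filters = FilterParams(starred=True, min_width=512)          # starred images, at least 512px wide
    return await db.querySearch(query_vector,
                                query_vector_name=VectorDbContext.IMG_VECTOR,
                                top_k=10,
                                filter_param=filters)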
app/__init__.py ADDED
@@ -0,0 +1,6 @@
1
+ __title__ = 'NekoImageGallery'
2
+ __description__ = 'An AI-powered natural language & reverse Image Search Engine powered by CLIP & qdrant.'
3
+ __version__ = '1.2.0'
4
+ __author__ = 'EdgeNeko; pk5ls20'
5
+ __author_email__ = '[email protected]'
6
+ __url__ = 'https://github.com/hv0905/NekoImageGallery'