diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..d7e7167586afa2a49b59fb3e56602e771d6c2cf2 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,19 @@ +.github +.DS_Store +docs +kubernetes +node_modules +/.svelte-kit +/package +.env +.env.* +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +__pycache__ +.idea +venv +_old +uploads +.ipynb_checkpoints +**/*.db +_test \ No newline at end of file diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..c38bf88bfb96e3a4f87e9f920096a93379a1e677 --- /dev/null +++ b/.env.example @@ -0,0 +1,13 @@ +# Ollama URL for the backend to connect +# The path '/ollama' will be redirected to the specified backend URL +OLLAMA_BASE_URL='http://localhost:11434' + +OPENAI_API_BASE_URL='' +OPENAI_API_KEY='' + +# AUTOMATIC1111_BASE_URL="http://localhost:7860" + +# DO NOT TRACK +SCARF_NO_ANALYTICS=true +DO_NOT_TRACK=true +ANONYMIZED_TELEMETRY=false \ No newline at end of file diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000000000000000000000000000000000000..38972655faff07d2cc0383044bbf9f43b22c2248 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/.eslintrc.cjs b/.eslintrc.cjs new file mode 100644 index 0000000000000000000000000000000000000000..cea095ea1aa19e444dc44264c138c95a82dfa04e --- /dev/null +++ b/.eslintrc.cjs @@ -0,0 +1,31 @@ +module.exports = { + root: true, + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:svelte/recommended', + 'plugin:cypress/recommended', + 'prettier' + ], + parser: '@typescript-eslint/parser', + plugins: ['@typescript-eslint'], + parserOptions: { + sourceType: 'module', + ecmaVersion: 2020, + extraFileExtensions: ['.svelte'] + }, + env: { + browser: true, + es2017: true, + node: true + }, + overrides: [ + { + files: ['*.svelte'], + parser: 'svelte-eslint-parser', + parserOptions: { + parser: '@typescript-eslint/parser' + } + } + ] +}; diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..f4382913f4ec5654c4d8c8ba17b25b64afc603d0 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +*.sh text eol=lf +*.ttf filter=lfs diff=lfs merge=lfs -text diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000000000000000000000000000000000000..ef274fa9184f884a8f4af07f0c246d0592eafe42 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +github: tjbck diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000000000000000000000000000000000..10958583fcc4d0e2d53dafcfa685e216b707e8ba --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,63 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' +--- + +# Bug Report + +## Description + +**Bug Summary:** +[Provide a brief but clear summary of the bug] + +**Steps to Reproduce:** +[Outline the steps to reproduce the bug. Be as detailed as possible.] + +**Expected Behavior:** +[Describe what you expected to happen.] + +**Actual Behavior:** +[Describe what actually happened.] 
+ +## Environment + +- **Open WebUI Version:** [e.g., 0.1.120] +- **Ollama (if applicable):** [e.g., 0.1.30, 0.1.32-rc1] + +- **Operating System:** [e.g., Windows 10, macOS Big Sur, Ubuntu 20.04] +- **Browser (if applicable):** [e.g., Chrome 100.0, Firefox 98.0] + +## Reproduction Details + +**Confirmation:** + +- [ ] I have read and followed all the instructions provided in the README.md. +- [ ] I am on the latest version of both Open WebUI and Ollama. +- [ ] I have included the browser console logs. +- [ ] I have included the Docker container logs. + +## Logs and Screenshots + +**Browser Console Logs:** +[Include relevant browser console logs, if applicable] + +**Docker Container Logs:** +[Include relevant Docker container logs, if applicable] + +**Screenshots (if applicable):** +[Attach any relevant screenshots to help illustrate the issue] + +## Installation Method + +[Describe the method you used to install the project, e.g., manual installation, Docker, package manager, etc.] + +## Additional Information + +[Include any additional details that may help in understanding and reproducing the issue. This could include specific configurations, error messages, or anything else relevant to the bug.] + +## Note + +If the bug report is incomplete or does not follow the provided instructions, it may not be addressed. Please ensure that you have followed the steps outlined in the README.md and troubleshooting.md documents, and provide all necessary information for us to reproduce and address the issue. Thank you! diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000000000000000000000000000000000..2f28cead03959213f1f1abd3f8a3a76f280bc1a0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,19 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000000000000000000000000000000000..166376305aa5ae7b8d3532b121bd44bd5230b757 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: + - package-ecosystem: pip + directory: '/backend' + schedule: + interval: weekly + target-branch: 'dev' + - package-ecosystem: 'github-actions' + directory: '/' + schedule: + # Check for updates to GitHub Actions every week + interval: 'weekly' diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000000000000000000000000000000000..2a45c2c16e41cccaea1dd2e67f7568fef1206959 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,72 @@ +# Pull Request Checklist + +### Note to first-time contributors: Please open a discussion post in [Discussions](https://github.com/open-webui/open-webui/discussions) and describe your changes before submitting a pull request. 
+ +**Before submitting, make sure you've checked the following:** + +- [ ] **Target branch:** Please verify that the pull request targets the `dev` branch. +- [ ] **Description:** Provide a concise description of the changes made in this pull request. +- [ ] **Changelog:** Ensure a changelog entry following the format of [Keep a Changelog](https://keepachangelog.com/) is added at the bottom of the PR description. +- [ ] **Documentation:** Have you updated the relevant documentation, e.g., [Open WebUI Docs](https://github.com/open-webui/docs) or other documentation sources? +- [ ] **Dependencies:** Are there any new dependencies? Have you updated the dependency versions in the documentation? +- [ ] **Testing:** Have you written and run sufficient tests for validating the changes? +- [ ] **Code review:** Have you performed a self-review of your code, addressing any coding standard issues and ensuring adherence to the project's coding standards? +- [ ] **Prefix:** To clearly categorize this pull request, prefix the pull request title using one of the following: + - **BREAKING CHANGE**: Significant changes that may affect compatibility + - **build**: Changes that affect the build system or external dependencies + - **ci**: Changes to our continuous integration processes or workflows + - **chore**: Refactor, cleanup, or other non-functional code changes + - **docs**: Documentation update or addition + - **feat**: Introduces a new feature or enhancement to the codebase + - **fix**: Bug fix or error correction + - **i18n**: Internationalization or localization changes + - **perf**: Performance improvement + - **refactor**: Code restructuring for better maintainability, readability, or scalability + - **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc.)
+ - **test**: Adding missing tests or correcting existing tests + - **WIP**: Work in progress, a temporary label for incomplete or ongoing work + +# Changelog Entry + +### Description + +- [Concisely describe the changes made in this pull request, including any relevant motivation and impact (e.g., fixing a bug, adding a feature, or improving performance)] + +### Added + +- [List any new features, functionalities, or additions] + +### Changed + +- [List any changes, updates, refactorings, or optimizations] + +### Deprecated + +- [List any deprecated functionality or features that have been removed] + +### Removed + +- [List any removed features, files, or functionalities] + +### Fixed + +- [List any fixes, corrections, or bug fixes] + +### Security + +- [List any new or updated security-related changes, including vulnerability fixes] + +### Breaking Changes + +- **BREAKING CHANGE**: [List any breaking changes affecting compatibility or functionality] + +--- + +### Additional Information + +- [Insert any additional context, notes, or explanations for the changes] + - [Reference any related issues, commits, or other relevant information] + +### Screenshots or Videos + +- [Attach any relevant screenshots or videos demonstrating the changes] diff --git a/.github/workflows/build-release.yml b/.github/workflows/build-release.yml new file mode 100644 index 0000000000000000000000000000000000000000..cae363f42a93e1b76b521a76a8bcdb19e97e76e3 --- /dev/null +++ b/.github/workflows/build-release.yml @@ -0,0 +1,70 @@ +name: Release + +on: + push: + branches: + - main # or whatever branch you want to use + +jobs: + release: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Check for changes in package.json + run: | + git diff --cached --diff-filter=d package.json || { + echo "No changes to package.json" + exit 1 + } + + - name: Get version number from package.json + id: get_version + run: | + VERSION=$(jq -r '.version' package.json) + echo "::set-output name=version::$VERSION" + + - name: Extract latest CHANGELOG entry + id: changelog + run: | + CHANGELOG_CONTENT=$(awk 'BEGIN {print_section=0;} /^## \[/ {if (print_section == 0) {print_section=1;} else {exit;}} print_section {print;}' CHANGELOG.md) + CHANGELOG_ESCAPED=$(echo "$CHANGELOG_CONTENT" | sed ':a;N;$!ba;s/\n/%0A/g') + echo "Extracted latest release notes from CHANGELOG.md:" + echo -e "$CHANGELOG_CONTENT" + echo "::set-output name=content::$CHANGELOG_ESCAPED" + + - name: Create GitHub release + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const changelog = `${{ steps.changelog.outputs.content }}`; + const release = await github.rest.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: `v${{ steps.get_version.outputs.version }}`, + name: `v${{ steps.get_version.outputs.version }}`, + body: changelog, + }) + console.log(`Created release ${release.data.html_url}`) + + - name: Upload package to GitHub release + uses: actions/upload-artifact@v4 + with: + name: package + path: . 
+ env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Trigger Docker build workflow + uses: actions/github-script@v7 + with: + script: | + github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docker-build.yaml', + ref: 'v${{ steps.get_version.outputs.version }}', + }) diff --git a/.github/workflows/deploy-to-hf-spaces.yml b/.github/workflows/deploy-to-hf-spaces.yml new file mode 100644 index 0000000000000000000000000000000000000000..3ebb2bfdc8c015c5637ec6806606408e7d389df4 --- /dev/null +++ b/.github/workflows/deploy-to-hf-spaces.yml @@ -0,0 +1,65 @@ +name: Deploy to HuggingFace Spaces + +on: + push: + branches: + - hf-space + workflow_dispatch: + +jobs: + check-secret: + runs-on: ubuntu-latest + outputs: + token-set: ${{ steps.check-key.outputs.defined }} + steps: + - id: check-key + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + if: "${{ env.HF_TOKEN != '' }}" + run: echo "defined=true" >> $GITHUB_OUTPUT + + deploy: + runs-on: ubuntu-latest + needs: [check-secret] + if: needs.check-secret.outputs.token-set == 'true' + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + HF_USERNAME: ${{ secrets.HF_USERNAME }} + HF_SPACE_NAME: ${{ secrets.HF_SPACE_NAME }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Remove git history + run: rm -rf .git + + - name: Prepend YAML front matter to README.md + run: | + cat <<EOF >README.md + --- + title: Open WebUI + emoji: 🐳 + colorFrom: purple + colorTo: gray + sdk: docker + app_port: 8080 + hf_oauth: true + hf_oauth_scopes: + - email + --- + $(cat README.md) + EOF + + - name: Configure git + run: | + git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + + - name: Set up Git and push to Space + run: | + git init --initial-branch=main + git lfs track "*.ttf" + rm demo.gif + git add .
+ git commit -m "GitHub deploy: ${{ github.sha }}" + git push --force https://${HF_USERNAME}:${HF_TOKEN}@huggingface.co/spaces/${HF_USERNAME}/${HF_SPACE_NAME} main diff --git a/.github/workflows/docker-build.yaml b/.github/workflows/docker-build.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0e62be3d9036b6f6f497bd2c9b7ad12ad89e6752 --- /dev/null +++ b/.github/workflows/docker-build.yaml @@ -0,0 +1,478 @@ +name: Create and publish Docker images with specific build args + +on: + workflow_dispatch: + push: + branches: + - main + - dev + tags: + - v* + +env: + REGISTRY: ghcr.io + +jobs: + build-main-image: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + strategy: + fail-fast: false + matrix: + platform: + - linux/amd64 + - linux/arm64 + + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (default latest tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + + - name: Extract metadata for Docker cache + id: cache-meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + ${{ github.ref_type == 'tag' && 'type=raw,value=main' || '' }} + flavor: | + prefix=cache-${{ matrix.platform }}- + latest=false + + - name: Build Docker image (latest) + uses: docker/build-push-action@v5 + id: build + with: + context: . 
+ push: true + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true + cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }} + cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max + build-args: | + BUILD_HASH=${{ github.sha }} + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-main-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + build-cuda-image: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + strategy: + fail-fast: false + matrix: + platform: + - linux/amd64 + - linux/arm64 + + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (cuda tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=cuda + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + suffix=-cuda,onlatest=true + + - name: Extract metadata for Docker cache + id: cache-meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + ${{ github.ref_type == 'tag' && 'type=raw,value=main' || '' }} + flavor: | + prefix=cache-cuda-${{ matrix.platform }}- + latest=false + + - name: Build Docker image (cuda) + uses: docker/build-push-action@v5 + id: build + with: + context: . 
+ push: true + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true + cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }} + cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max + build-args: | + BUILD_HASH=${{ github.sha }} + USE_CUDA=true + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-cuda-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + build-ollama-image: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + strategy: + fail-fast: false + matrix: + platform: + - linux/amd64 + - linux/arm64 + + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (ollama tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=ollama + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + suffix=-ollama,onlatest=true + + - name: Extract metadata for Docker cache + id: cache-meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + ${{ github.ref_type == 'tag' && 'type=raw,value=main' || '' }} + flavor: | + prefix=cache-ollama-${{ matrix.platform }}- + latest=false + + - name: Build Docker image (ollama) + uses: docker/build-push-action@v5 + id: build + with: + context: . 
+ push: true + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true + cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }} + cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max + build-args: | + BUILD_HASH=${{ github.sha }} + USE_OLLAMA=true + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-ollama-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + merge-main-images: + runs-on: ubuntu-latest + needs: [ build-main-image ] + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Download digests + uses: actions/download-artifact@v4 + with: + pattern: digests-main-* + path: /tmp/digests + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (default latest tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.FULL_IMAGE_NAME }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }} + + + merge-cuda-images: + runs-on: ubuntu-latest + needs: [ build-cuda-image ] + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Download digests + uses: actions/download-artifact@v4 + with: + pattern: digests-cuda-* + path: /tmp/digests + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (default latest tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=cuda + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + suffix=-cuda,onlatest=true + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.FULL_IMAGE_NAME }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }} + + merge-ollama-images: + runs-on: ubuntu-latest + needs: [ build-ollama-image ] + steps: + # GitHub Packages requires the entire repository name to be in lowercase + # although the repository owner has a lowercase username, this prevents some people from running actions after forking + - name: Set repository and image name to lowercase + run: | + echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV} + echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV} + env: + IMAGE_NAME: '${{ github.repository }}' + + - name: Download digests + uses: actions/download-artifact@v4 + with: + pattern: digests-ollama-* + path: /tmp/digests + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata for Docker images (default ollama tag) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.FULL_IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha,prefix=git- + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=ollama + flavor: | + latest=${{ github.ref == 'refs/heads/main' }} + suffix=-ollama,onlatest=true + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.FULL_IMAGE_NAME }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/format-backend.yaml b/.github/workflows/format-backend.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2e980de41f2d2d4118d8d3556113a08a6dfdb00a --- /dev/null +++ b/.github/workflows/format-backend.yaml @@ -0,0 +1,39 @@ +name: Python CI + +on: + push: + branches: + - main + - dev + pull_request: + branches: + - main + - dev + +jobs: + build: + name: 'Format Backend' + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.11] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install black + + - name: Format backend + run: npm run format:backend + + - name: Check for changes after format + run: git diff --exit-code diff --git a/.github/workflows/format-build-frontend.yaml b/.github/workflows/format-build-frontend.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9ee57f475a176a7c5b50e7227793a9d41ef3efdc --- /dev/null +++ b/.github/workflows/format-build-frontend.yaml @@ -0,0 +1,57 @@ +name: Frontend Build + +on: + push: + branches: + - main + - dev + pull_request: + branches: + - main + - dev + +jobs: + build: + name: 'Format & Build Frontend' + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' # Or specify any other version you want to use + + - name: Install Dependencies + run: npm install + + - name: Format Frontend + run: npm run format + + - name: Run i18next + run: npm run i18n:parse + + - name: Check for Changes After Format + run: git diff --exit-code + + - name: Build Frontend + run: npm run build + + test-frontend: + name: 'Frontend Unit Tests' + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install Dependencies + run: npm ci + + - name: Run vitest + run: npm run test:frontend diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml new file mode 100644 index 0000000000000000000000000000000000000000..639ea789fb12cc6d52d57c81f747e8ffed212de3 --- /dev/null +++ b/.github/workflows/integration-test.yml @@ -0,0 +1,243 @@ +name: Integration Test + +on: + push: + branches: + - main + - dev + pull_request: + branches: + - main + - dev + +jobs: + cypress-run: + name: Run Cypress Integration Tests + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Build and run Compose Stack + run: | + docker compose \ + --file docker-compose.yaml \ + --file docker-compose.api.yaml \ + --file docker-compose.a1111-test.yaml \ + up --detach --build + + - name: Wait for Ollama to be up + timeout-minutes: 5 + run: | + until curl --output /dev/null --silent --fail http://localhost:11434; do + printf '.' + sleep 1 + done + echo "Service is up!" 
+ + - name: Delete Docker build cache + run: | + docker builder prune --all --force + + - name: Preload Ollama model + run: | + docker exec ollama ollama pull qwen:0.5b-chat-v1.5-q2_K + + - name: Cypress run + uses: cypress-io/github-action@v6 + with: + browser: chrome + wait-on: 'http://localhost:3000' + config: baseUrl=http://localhost:3000 + + - uses: actions/upload-artifact@v4 + if: always() + name: Upload Cypress videos + with: + name: cypress-videos + path: cypress/videos + if-no-files-found: ignore + + - name: Extract Compose logs + if: always() + run: | + docker compose logs > compose-logs.txt + + - uses: actions/upload-artifact@v4 + if: always() + name: Upload Compose logs + with: + name: compose-logs + path: compose-logs.txt + if-no-files-found: ignore + + # pytest: + # name: Run Backend Tests + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v4 + + # - name: Set up Python + # uses: actions/setup-python@v4 + # with: + # python-version: ${{ matrix.python-version }} + + # - name: Install dependencies + # run: | + # python -m pip install --upgrade pip + # pip install -r backend/requirements.txt + + # - name: pytest run + # run: | + # ls -al + # cd backend + # PYTHONPATH=. pytest . -o log_cli=true -o log_cli_level=INFO + + migration_test: + name: Run Migration Tests + runs-on: ubuntu-latest + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + # mysql: + # image: mysql + # env: + # MYSQL_ROOT_PASSWORD: mysql + # MYSQL_DATABASE: mysql + # options: >- + # --health-cmd "mysqladmin ping -h localhost" + # --health-interval 10s + # --health-timeout 5s + # --health-retries 5 + # ports: + # - 3306:3306 + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Set up uv + uses: yezz123/setup-uv@v4 + with: + uv-venv: venv + + - name: Activate virtualenv + run: | + . venv/bin/activate + echo PATH=$PATH >> $GITHUB_ENV + + - name: Install dependencies + run: | + uv pip install -r backend/requirements.txt + + - name: Test backend with SQLite + id: sqlite + env: + WEBUI_SECRET_KEY: secret-key + GLOBAL_LOG_LEVEL: debug + run: | + cd backend + uvicorn main:app --port "8080" --forwarded-allow-ips '*' & + UVICORN_PID=$! + # Wait up to 40 seconds for the server to start + for i in {1..40}; do + curl -s http://localhost:8080/api/config > /dev/null && break + sleep 1 + if [ $i -eq 40 ]; then + echo "Server failed to start" + kill -9 $UVICORN_PID + exit 1 + fi + done + # Check that the server is still running after 5 seconds + sleep 5 + if ! kill -0 $UVICORN_PID; then + echo "Server has stopped" + exit 1 + fi + + - name: Test backend with Postgres + if: success() || steps.sqlite.conclusion == 'failure' + env: + WEBUI_SECRET_KEY: secret-key + GLOBAL_LOG_LEVEL: debug + DATABASE_URL: postgresql://postgres:postgres@localhost:5432/postgres + run: | + cd backend + uvicorn main:app --port "8081" --forwarded-allow-ips '*' & + UVICORN_PID=$! + # Wait up to 20 seconds for the server to start + for i in {1..20}; do + curl -s http://localhost:8081/api/config > /dev/null && break + sleep 1 + if [ $i -eq 20 ]; then + echo "Server failed to start" + kill -9 $UVICORN_PID + exit 1 + fi + done + # Check that the server is still running after 5 seconds + sleep 5 + if ! 
kill -0 $UVICORN_PID; then + echo "Server has stopped" + exit 1 + fi + + # Check that service will reconnect to postgres when connection will be closed + status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db) + if [[ "$status_code" -ne 200 ]] ; then + echo "Server has failed before postgres reconnect check" + exit 1 + fi + + echo "Terminating all connections to postgres..." + python -c "import os, psycopg2 as pg2; \ + conn = pg2.connect(dsn=os.environ['DATABASE_URL'].replace('+pool', '')); \ + cur = conn.cursor(); \ + cur.execute('SELECT pg_terminate_backend(psa.pid) FROM pg_stat_activity psa WHERE datname = current_database() AND pid <> pg_backend_pid();')" + + status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db) + if [[ "$status_code" -ne 200 ]] ; then + echo "Server has not reconnected to postgres after connection was closed: returned status $status_code" + exit 1 + fi + +# - name: Test backend with MySQL +# if: success() || steps.sqlite.conclusion == 'failure' || steps.postgres.conclusion == 'failure' +# env: +# WEBUI_SECRET_KEY: secret-key +# GLOBAL_LOG_LEVEL: debug +# DATABASE_URL: mysql://root:mysql@localhost:3306/mysql +# run: | +# cd backend +# uvicorn main:app --port "8083" --forwarded-allow-ips '*' & +# UVICORN_PID=$! +# # Wait up to 20 seconds for the server to start +# for i in {1..20}; do +# curl -s http://localhost:8083/api/config > /dev/null && break +# sleep 1 +# if [ $i -eq 20 ]; then +# echo "Server failed to start" +# kill -9 $UVICORN_PID +# exit 1 +# fi +# done +# # Check that the server is still running after 5 seconds +# sleep 5 +# if ! kill -0 $UVICORN_PID; then +# echo "Server has stopped" +# exit 1 +# fi diff --git a/.github/workflows/lint-backend.disabled b/.github/workflows/lint-backend.disabled new file mode 100644 index 0000000000000000000000000000000000000000..d220031cc352bb5d2456b89b81ce33be26a09fcf --- /dev/null +++ b/.github/workflows/lint-backend.disabled @@ -0,0 +1,27 @@ +name: Python CI +on: + push: + branches: ['main'] + pull_request: +jobs: + build: + name: 'Lint Backend' + env: + PUBLIC_API_BASE_URL: '' + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - latest + steps: + - uses: actions/checkout@v4 + - name: Use Python + uses: actions/setup-python@v4 + - name: Use Bun + uses: oven-sh/setup-bun@v1 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + - name: Lint backend + run: bun run lint:backend diff --git a/.github/workflows/lint-frontend.disabled b/.github/workflows/lint-frontend.disabled new file mode 100644 index 0000000000000000000000000000000000000000..2c1cd3c5a574989def4319d07405f0bb621d74df --- /dev/null +++ b/.github/workflows/lint-frontend.disabled @@ -0,0 +1,21 @@ +name: Bun CI +on: + push: + branches: ['main'] + pull_request: +jobs: + build: + name: 'Lint Frontend' + env: + PUBLIC_API_BASE_URL: '' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Use Bun + uses: oven-sh/setup-bun@v1 + - run: bun --version + - name: Install frontend dependencies + run: bun install --frozen-lockfile + - run: bun run lint:frontend + - run: bun run lint:types + if: success() || failure() \ No newline at end of file diff --git a/.github/workflows/release-pypi.yml b/.github/workflows/release-pypi.yml new file mode 100644 index 0000000000000000000000000000000000000000..70a19c64a616d4e55bdc2042c3f19ba29cab7c87 --- /dev/null +++ b/.github/workflows/release-pypi.yml @@ -0,0 +1,31 @@ +name: 
Release to PyPI + +on: + push: + branches: + - main # or whatever branch you want to use + +jobs: + release: + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/open-webui + permissions: + id-token: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 18 + - uses: actions/setup-python@v5 + with: + python-version: 3.11 + - name: Build + run: | + python -m pip install --upgrade pip + pip install build + python -m build . + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/sync-hf-spaces-with-dev.yml b/.github/workflows/sync-hf-spaces-with-dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..62354c90dfc0accf813690210e45e33f5c717871 --- /dev/null +++ b/.github/workflows/sync-hf-spaces-with-dev.yml @@ -0,0 +1,30 @@ +name: Sync hf-spaces with dev + +on: + push: + branches: + - dev + - hf-spaces + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +jobs: + sync: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Sync with dev + run: | + git checkout dev + git fetch origin + git checkout hf-space + git pull + git merge origin/dev + git push origin hf-space diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..32271f8087e213e83089162bd0b1ec99c60d45ca --- /dev/null +++ b/.gitignore @@ -0,0 +1,309 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Pyodide distribution +static/pyodide/* +!static/pyodide/pyodide-lock.json + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# 
yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +# cypress artifacts +cypress/videos +cypress/screenshots +.vscode/settings.json diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000000000000000000000000000000000000..b6f27f135954640c8cc5bfd7b8c9922ca6eb2aad --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..82c49125724030d1010bb123df7dae758236ff11 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,316 @@ +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock + +kubernetes/ + +# Copy of .gitignore +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +# cypress artifacts +cypress/videos +cypress/screenshots + + + +/static/* \ No newline at end of file diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000000000000000000000000000000000000..a77fddea90975988d17a7e8b2f61720939a947f5 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,9 @@ +{ + "useTabs": true, + "singleQuote": true, + "trailingComma": "none", + "printWidth": 100, + "plugins": ["prettier-plugin-svelte"], + 
"pluginSearchDirs": ["."], + "overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }] +} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..a3bceff25832c8336ff54e989922f3edf93b3801 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,744 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.3.10] - 2024-07-17 + +### Fixed + +- **🔄 Improved File Upload**: Addressed the issue where file uploads lacked animation. +- **💬 Chat Continuity**: Fixed a problem where existing chats were not functioning properly in some instances. +- **🗂️ Chat File Reset**: Resolved the issue of chat files not resetting for new conversations, now ensuring a clean slate for each chat session. +- **📁 Document Workspace Uploads**: Corrected the handling of document uploads in the workspace using the Files API. + +## [0.3.9] - 2024-07-17 + +### Added + +- **📁 Files Chat Controls**: We've reverted to the old file handling behavior where uploaded files are always included. You can now manage files directly within the chat controls section, giving you the ability to remove files as needed. +- **🔧 "Action" Function Support**: Introducing a new "Action" function to write custom buttons to the message toolbar. This feature enables more interactive messaging, with documentation coming soon. +- **📜 Citations Handling**: For newly uploaded files in documents workspace, citations will now display the actual filename. Additionally, you can click on these filenames to open the file in a new tab for easier access. +- **🛠️ Event Emitter and Call Updates**: Enhanced 'event_emitter' to allow message replacement and 'event_call' to support text input for Tools and Functions. Detailed documentation will be provided shortly. +- **🎨 Styling Refactor**: Various styling updates for a cleaner and more cohesive user interface. +- **🌐 Enhanced Translations**: Improved translations for Catalan, Ukrainian, and Brazilian Portuguese. + +### Fixed + +- **🔧 Chat Controls Priority**: Resolved an issue where Chat Controls values were being overridden by model information parameters. The priority is now Chat Controls, followed by Global Settings, then Model Settings. +- **🪲 Debug Logs**: Fixed an issue where debug logs were not being logged properly. +- **🔑 Automatic1111 Auth Key**: The auth key for Automatic1111 is no longer required. +- **📝 Title Generation**: Ensured that the title generation runs only once, even when multiple models are in a chat. +- **✅ Boolean Values in Params**: Added support for boolean values in parameters. +- **🖼️ Files Overlay Styling**: Fixed the styling issue with the files overlay. + +### Changed + +- **⬆️ Dependency Updates** + - Upgraded 'pydantic' from version 2.7.1 to 2.8.2. + - Upgraded 'sqlalchemy' from version 2.0.30 to 2.0.31. + - Upgraded 'unstructured' from version 0.14.9 to 0.14.10. + - Upgraded 'chromadb' from version 0.5.3 to 0.5.4. + +## [0.3.8] - 2024-07-09 + +### Added + +- **💬 Chat Controls**: Easily adjust parameters for each chat session, offering more precise control over your interactions. +- **📌 Pinned Chats**: Support for pinned chats, allowing you to keep important conversations easily accessible. 
+- **📄 Apache Tika Integration**: Added support for using Apache Tika as a document loader, enhancing document processing capabilities. +- **🛠️ Custom Environment for OpenID Claims**: Allows setting custom claims for OpenID, providing more flexibility in user authentication. +- **🔧 Enhanced Tools & Functions API**: Introduced 'event_emitter' and 'event_call', now you can also add citations for better documentation and tracking. Detailed documentation will be provided on our documentation website. +- **↔️ Sideways Scrolling in Settings**: Settings tabs container now supports horizontal scrolling for easier navigation. +- **🌑 Darker OLED Theme**: Includes a new, darker OLED theme and improved styling for the light theme, enhancing visual appeal. +- **🌐 Language Updates**: Updated translations for Indonesian, German, French, and Catalan languages, expanding accessibility. + +### Fixed + +- **⏰ OpenAI Streaming Timeout**: Resolved issues with OpenAI streaming response using the 'AIOHTTP_CLIENT_TIMEOUT' setting, ensuring reliable performance. +- **💡 User Valves**: Fixed malfunctioning user valves, ensuring proper functionality. +- **🔄 Collapsible Components**: Addressed issues with collapsible components not working, restoring expected behavior. + +### Changed + +- **🗃️ Database Backend**: Switched from Peewee to SQLAlchemy for improved concurrency support, enhancing database performance. +- **⬆️ ChromaDB Update**: Upgraded to version 0.5.3. Ensure your remote ChromaDB instance matches this version. +- **🔤 Primary Font Styling**: Updated primary font to Archivo for better visual consistency. +- **🔄 Font Change for Windows**: Replaced Arimo with Inter font for Windows users, improving readability. +- **🚀 Lazy Loading**: Implemented lazy loading for 'faster_whisper' and 'sentence_transformers' to reduce startup memory usage. +- **📋 Task Generation Payload**: Task generations now include only the "task" field in the body instead of "title". + +## [0.3.7] - 2024-06-29 + +### Added + +- **🌐 Enhanced Internationalization (i18n)**: Newly introduced Indonesian translation, and updated translations for Turkish, Chinese, and Catalan languages to improve user accessibility. + +### Fixed + +- **🕵️‍♂️ Browser Language Detection**: Corrected the issue where the application was not properly detecting and adapting to the browser's language settings. +- **🔐 OIDC Admin Role Assignment**: Fixed a bug where the admin role was not being assigned to the first user who signed up via OpenID Connect (OIDC). +- **💬 Chat/Completions Endpoint**: Resolved an issue where the chat/completions endpoint was non-functional when the stream option was set to False. +- **🚫 'WEBUI_AUTH' Configuration**: Addressed the problem where setting 'WEBUI_AUTH' to False was not being applied correctly. + +### Changed + +- **📦 Dependency Update**: Upgraded 'authlib' from version 1.3.0 to 1.3.1 to ensure better security and performance enhancements. + +## [0.3.6] - 2024-06-27 + +### Added + +- **✨ "Functions" Feature**: You can now utilize "Functions" like filters (middleware) and pipe (model) functions directly within the WebUI. While largely compatible with Pipelines, these native functions can be executed easily within Open WebUI. Example use cases for filter functions include usage monitoring, real-time translation, moderation, and automemory. For pipe functions, the scope ranges from Cohere and Anthropic integration directly within Open WebUI, enabling "Valves" for per-user OpenAI API key usage, and much more. 
If you encounter issues, SAFE_MODE has been introduced. +- **📁 Files API**: Compatible with OpenAI, this feature allows for custom Retrieval-Augmented Generation (RAG) in conjunction with the Filter Function. More examples will be shared on our community platform and official documentation website. +- **🛠️ Tool Enhancements**: Tools now support citations and "Valves". Documentation will be available shortly. +- **🔗 Iframe Support via Files API**: Enables rendering HTML directly into your chat interface using functions and tools. Use cases include playing games like DOOM and Snake, displaying a weather applet, and implementing Anthropic "artifacts"-like features. Stay tuned for updates on our community platform and documentation. +- **🔒 Experimental OAuth Support**: New experimental OAuth support. Check our documentation for more details. +- **🖼️ Custom Background Support**: Set a custom background from Settings > Interface to personalize your experience. +- **🔑 AUTOMATIC1111_API_AUTH Support**: Enhanced security for the AUTOMATIC1111 API. +- **🎨 Code Highlight Optimization**: Improved code highlighting features. +- **🎙️ Voice Interruption Feature**: Reintroduced and now toggleable from Settings > Interface. +- **💤 Wakelock API**: Now in use to prevent screen dimming during important tasks. +- **🔐 API Key Privacy**: All API keys are now hidden by default for better security. +- **🔍 New Web Search Provider**: Added jina_search as a new option. +- **🌐 Enhanced Internationalization (i18n)**: Improved Korean translation and updated Chinese and Ukrainian translations. + +### Fixed + +- **🔧 Conversation Mode Issue**: Fixed the issue where Conversation Mode remained active after being removed from settings. +- **📏 Scroll Button Obstruction**: Resolved the issue where the scrollToBottom button container obstructed clicks on buttons beneath it. + +### Changed + +- **⏲️ AIOHTTP_CLIENT_TIMEOUT**: Now set to 'None' by default for improved configuration flexibility. +- **📞 Voice Call Enhancements**: Improved by skipping code blocks and expressions during calls. +- **🚫 Error Message Handling**: Disabled the continuation of operations with error messages. +- **🗂️ Playground Relocation**: Moved the Playground from the workspace to the user menu for better user experience. + +## [0.3.5] - 2024-06-16 + +### Added + +- **📞 Enhanced Voice Call**: Text-to-speech (TTS) callback now operates in real-time for each sentence, reducing latency by not waiting for full completion. +- **👆 Tap to Interrupt**: During a call, you can now stop the assistant from speaking by simply tapping, instead of using voice. This resolves the issue of the speaker's voice being mistakenly registered as input. +- **😊 Emoji Call**: Toggle this feature on from the Settings > Interface, allowing LLMs to express emotions using emojis during voice calls for a more dynamic interaction. +- **🖱️ Quick Archive/Delete**: Use the Shift key + mouseover on the chat list to swiftly archive or delete items. +- **📝 Markdown Support in Model Descriptions**: You can now format model descriptions with markdown, enabling bold text, links, etc. +- **🧠 Editable Memories**: Adds the capability to modify memories. +- **📋 Admin Panel Sorting**: Introduces the ability to sort users/chats within the admin panel. +- **🌑 Dark Mode for Quick Selectors**: Dark mode now available for chat quick selectors (prompts, models, documents). +- **🔧 Advanced Parameters**: Adds 'num_keep' and 'num_batch' to advanced parameters for customization. 
+- **📅 Dynamic System Prompts**: New variables '{{CURRENT_DATETIME}}', '{{CURRENT_TIME}}', '{{USER_LOCATION}}' added for system prompts. Ensure '{{USER_LOCATION}}' is toggled on from Settings > Interface. +- **🌐 Tavily Web Search**: Includes Tavily as a web search provider option. +- **🖊️ Federated Auth Usernames**: Ability to set user names for federated authentication. +- **🔗 Auto Clean URLs**: When adding connection URLs, trailing slashes are now automatically removed. +- **🌐 Enhanced Translations**: Improved Chinese and Swedish translations. + +### Fixed + +- **⏳ AIOHTTP_CLIENT_TIMEOUT**: Introduced a new environment variable 'AIOHTTP_CLIENT_TIMEOUT' for requests to Ollama lasting longer than 5 minutes. Default is 300 seconds; set to blank ('') for no timeout. +- **❌ Message Delete Freeze**: Resolved an issue where message deletion would sometimes cause the web UI to freeze. + +## [0.3.4] - 2024-06-12 + +### Fixed + +- **🔒 Mixed Content with HTTPS Issue**: Resolved a problem where mixed content (HTTP and HTTPS) was causing security warnings and blocking resources on HTTPS sites. +- **🔍 Web Search Issue**: Addressed the problem where web search functionality was not working correctly. The 'ENABLE_RAG_LOCAL_WEB_FETCH' option has been reintroduced to restore proper web searching capabilities. +- **💾 RAG Template Not Being Saved**: Fixed an issue where the RAG template was not being saved correctly, ensuring your custom templates are now preserved as expected. + +## [0.3.3] - 2024-06-12 + +### Added + +- **🛠️ Native Python Function Calling**: Introducing native Python function calling within Open WebUI. We’ve also included a built-in code editor to seamlessly develop and integrate function code within the 'Tools' workspace. With this, you can significantly enhance your LLM’s capabilities by creating custom RAG pipelines, web search tools, and even agent-like features such as sending Discord messages. +- **🌐 DuckDuckGo Integration**: Added DuckDuckGo as a web search provider, giving you more search options. +- **🌏 Enhanced Translations**: Improved translations for Vietnamese and Chinese languages, making the interface more accessible. + +### Fixed + +- **🔗 Web Search URL Error Handling**: Fixed the issue where a single URL error would disrupt the data loading process in Web Search mode. Now, such errors will be handled gracefully to ensure uninterrupted data loading. +- **🖥️ Frontend Responsiveness**: Resolved the problem where the frontend would stop responding if the backend encounters an error while downloading a model. Improved error handling to maintain frontend stability. +- **🔧 Dependency Issues in pip**: Fixed issues related to pip installations, ensuring all dependencies are correctly managed to prevent installation errors. + +## [0.3.2] - 2024-06-10 + +### Added + +- **🔍 Web Search Query Status**: The web search query will now persist in the results section to aid in easier debugging and tracking of search queries. +- **🌐 New Web Search Provider**: We have added Serply as a new option for web search providers, giving you more choices for your search needs. +- **🌏 Improved Translations**: We've enhanced translations for Chinese and Portuguese. + +### Fixed + +- **🎤 Audio File Upload Issue**: The bug that prevented audio files from being uploaded in chat input has been fixed, ensuring smooth communication. 
+- **💬 Message Input Handling**: Improved the handling of message inputs by instantly clearing images and text after sending, along with immediate visual indications when a response message is loading, enhancing user feedback. +- **⚙️ Parameter Registration and Validation**: Fixed the issue where parameters were not registering in certain cases and addressed the problem where users were unable to save due to invalid input errors. + +## [0.3.1] - 2024-06-09 + +### Fixed + +- **💬 Chat Functionality**: Resolved the issue where chat functionality was not working for specific models. + +## [0.3.0] - 2024-06-09 + +### Added + +- **📚 Knowledge Support for Models**: Attach documents directly to models from the models workspace, enhancing the information available to each model. +- **🎙️ Hands-Free Voice Call Feature**: Initiate voice calls without needing to use your hands, making interactions more seamless. +- **📹 Video Call Feature**: Enable video calls with supported vision models like Llava and GPT-4o, adding a visual dimension to your communications. +- **🎛️ Enhanced UI for Voice Recording**: Improved user interface for the voice recording feature, making it more intuitive and user-friendly. +- **🌐 External STT Support**: Now support for external Speech-To-Text services, providing more flexibility in choosing your STT provider. +- **⚙️ Unified Settings**: Consolidated settings including document settings under a new admin settings section for easier management. +- **🌑 Dark Mode Splash Screen**: A new splash screen for dark mode, ensuring a consistent and visually appealing experience for dark mode users. +- **📥 Upload Pipeline**: Directly upload pipelines from the admin settings > pipelines section, streamlining the pipeline management process. +- **🌍 Improved Language Support**: Enhanced support for Chinese and Ukrainian languages, better catering to a global user base. + +### Fixed + +- **🛠️ Playground Issue**: Fixed the playground not functioning properly, ensuring a smoother user experience. +- **🔥 Temperature Parameter Issue**: Corrected the issue where the temperature value '0' was not being passed correctly. +- **📝 Prompt Input Clearing**: Resolved prompt input textarea not being cleared right away, ensuring a clean slate for new inputs. +- **✨ Various UI Styling Issues**: Fixed numerous user interface styling problems for a more cohesive look. +- **👥 Active Users Display**: Fixed active users showing active sessions instead of actual users, now reflecting accurate user activity. +- **🌐 Community Platform Compatibility**: The Community Platform is back online and fully compatible with Open WebUI. + +### Changed + +- **📝 RAG Implementation**: Updated the RAG (Retrieval-Augmented Generation) implementation to use a system prompt for context, instead of overriding the user's prompt. +- **🔄 Settings Relocation**: Moved Models, Connections, Audio, and Images settings to the admin settings for better organization. +- **✍️ Improved Title Generation**: Enhanced the default prompt for title generation, yielding better results. +- **🔧 Backend Task Management**: Tasks like title generation and search query generation are now managed on the backend side and controlled only by the admin. +- **🔍 Editable Search Query Prompt**: You can now edit the search query generation prompt, offering more control over how queries are generated. +- **📏 Prompt Length Threshold**: Set the prompt length threshold for search query generation from the admin settings, giving more customization options. 
+- **📣 Settings Consolidation**: Merged the Banners admin setting with the Interface admin setting for a more streamlined settings area. + +## [0.2.5] - 2024-06-05 + +### Added + +- **👥 Active Users Indicator**: Now you can see how many people are currently active and what they are running. This helps you gauge when performance might slow down due to a high number of users. +- **🗂️ Create Ollama Modelfile**: The option to create a modelfile for Ollama has been reintroduced in the Settings > Models section, making it easier to manage your models. +- **⚙️ Default Model Setting**: Added an option to set the default model from Settings > Interface. This feature is now easily accessible, especially convenient for mobile users as it was previously hidden. +- **🌐 Enhanced Translations**: We've improved the Chinese translations and added support for Turkmen and Norwegian languages to make the interface more accessible globally. + +### Fixed + +- **📱 Mobile View Improvements**: The UI now uses dvh (dynamic viewport height) instead of vh (viewport height), providing a better and more responsive experience for mobile users. + +## [0.2.4] - 2024-06-03 + +### Added + +- **👤 Improved Account Pending Page**: The account pending page now displays admin details by default to avoid confusion. You can disable this feature in the admin settings if needed. +- **🌐 HTTP Proxy Support**: We have enabled the use of the 'http_proxy' environment variable in OpenAI and Ollama API calls, making it easier to configure network settings. +- **❓ Quick Access to Documentation**: You can now easily access Open WebUI documents via a question mark button located at the bottom right corner of the screen (available on larger screens like PCs). +- **🌍 Enhanced Translation**: Improvements have been made to translations. + +### Fixed + +- **🔍 SearxNG Web Search**: Fixed the issue where the SearxNG web search functionality was not working properly. + +## [0.2.3] - 2024-06-03 + +### Added + +- **📁 Export Chat as JSON**: You can now export individual chats as JSON files from the navbar menu by navigating to 'Download > Export Chat'. This makes sharing specific conversations easier. +- **✏️ Edit Titles with Double Click**: Double-click on titles to rename them quickly and efficiently. +- **🧩 Batch Multiple Embeddings**: Introduced 'RAG_EMBEDDING_OPENAI_BATCH_SIZE' to process multiple embeddings in a batch, enhancing performance for large datasets. +- **🌍 Improved Translations**: Enhanced the translation quality across various languages for a better user experience. + +### Fixed + +- **🛠️ Modelfile Migration Script**: Fixed an issue where the modelfile migration script would fail if an invalid modelfile was encountered. +- **💬 Zhuyin Input Method on Mac**: Resolved an issue where using the Zhuyin input method in the Web UI on a Mac caused text to send immediately upon pressing the enter key, leading to incorrect input. +- **🔊 Local TTS Voice Selection**: Fixed the issue where the selected local Text-to-Speech (TTS) voice was not being displayed in settings. + +## [0.2.2] - 2024-06-02 + +### Added + +- **🌊 Mermaid Rendering Support**: We've included support for Mermaid rendering. This allows you to create beautiful diagrams and flowcharts directly within Open WebUI. +- **🔄 New Environment Variable 'RESET_CONFIG_ON_START'**: Introducing a new environment variable: 'RESET_CONFIG_ON_START'. Set this variable to reset your configuration settings upon starting the application, making it easier to revert to default settings. 
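As a quick, non-authoritative illustration of the 'RESET_CONFIG_ON_START' variable described above, one way to pass it to a Docker-based deployment might look like the following; the `true` value, port mapping, volume, and image tag are placeholders borrowed from the install commands later in this repository, not part of this entry.

```bash
# Hypothetical sketch: reset stored configuration to defaults on startup.
# RESET_CONFIG_ON_START comes from the 0.2.2 entry above; all other values
# (ports, volume, image tag) are placeholders.
docker run -d \
  -p 3000:8080 \
  -e RESET_CONFIG_ON_START=true \
  -v open-webui:/app/backend/data \
  --name open-webui \
  ghcr.io/open-webui/open-webui:main
```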
+ +### Fixed + +- **🔧 Pipelines Filter Issue**: We've addressed an issue with the pipelines where filters were not functioning as expected. + +## [0.2.1] - 2024-06-02 + +### Added + +- **🖱️ Single Model Export Button**: Easily export models with just one click using the new single model export button. +- **🖥️ Advanced Parameters Support**: Added support for 'num_thread', 'use_mmap', and 'use_mlock' parameters for Ollama. +- **🌐 Improved Vietnamese Translation**: Enhanced Vietnamese language support for a better user experience for our Vietnamese-speaking community. + +### Fixed + +- **🔧 OpenAI URL API Save Issue**: Corrected a problem preventing the saving of OpenAI URL API settings. +- **🚫 Display Issue with Disabled Ollama API**: Fixed the display bug causing models to appear in settings when the Ollama API was disabled. + +### Changed + +- **💡 Versioning Update**: As a reminder from our previous update, version 0.2.y will focus primarily on bug fixes, while major updates will be designated as 0.x from now on for better version tracking. + +## [0.2.0] - 2024-06-01 + +### Added + +- **🔧 Pipelines Support**: Open WebUI now includes a plugin framework for enhanced customization and functionality (https://github.com/open-webui/pipelines). Easily add custom logic and integrate Python libraries, from AI agents to home automation APIs. +- **🔗 Function Calling via Pipelines**: Integrate function calling seamlessly through Pipelines. +- **⚖️ User Rate Limiting via Pipelines**: Implement user-specific rate limits to manage API usage efficiently. +- **📊 Usage Monitoring with Langfuse**: Track and analyze usage statistics with Langfuse integration through Pipelines. +- **🕒 Conversation Turn Limits**: Set limits on conversation turns to manage interactions better through Pipelines. +- **🛡️ Toxic Message Filtering**: Automatically filter out toxic messages to maintain a safe environment using Pipelines. +- **🔍 Web Search Support**: Introducing built-in web search capabilities via RAG API, allowing users to search using SearXNG, Google Programmatic Search Engine, Brave Search, serpstack, and serper. Activate it effortlessly by adding necessary variables from Document settings > Web Params. +- **🗂️ Models Workspace**: Create and manage model presets for both Ollama/OpenAI API. Note: The old Modelfiles workspace is deprecated. +- **🛠️ Model Builder Feature**: Build and edit all models with persistent builder mode. +- **🏷️ Model Tagging Support**: Organize models with tagging features in the models workspace. +- **📋 Model Ordering Support**: Effortlessly organize models by dragging and dropping them into the desired positions within the models workspace. +- **📈 OpenAI Generation Stats**: Access detailed generation statistics for OpenAI models. +- **📅 System Prompt Variables**: New variables added: '{{CURRENT_DATE}}' and '{{USER_NAME}}' for dynamic prompts. +- **📢 Global Banner Support**: Manage global banners from admin settings > banners. +- **🗃️ Enhanced Archived Chats Modal**: Search and export archived chats easily. +- **📂 Archive All Button**: Quickly archive all chats from settings > chats. +- **🌐 Improved Translations**: Added and improved translations for French, Croatian, Cebuano, and Vietnamese. + +### Fixed + +- **🔍 Archived Chats Visibility**: Resolved issue with archived chats not showing in the admin panel. +- **💬 Message Styling**: Fixed styling issues affecting message appearance. +- **🔗 Shared Chat Responses**: Corrected the issue where shared chat response messages were not readonly. 
+- **🖥️ UI Enhancement**: Fixed the scrollbar overlapping issue with the message box in the user interface. + +### Changed + +- **💾 User Settings Storage**: User settings are now saved on the backend, ensuring consistency across all devices. +- **📡 Unified API Requests**: The API request for getting models is now unified to '/api/models' for easier usage. +- **🔄 Versioning Update**: Our versioning will now follow the format 0.x for major updates and 0.x.y for patches. +- **📦 Export All Chats (All Users)**: Moved this functionality to the Admin Panel settings for better organization and accessibility. + +### Removed + +- **🚫 Bundled LiteLLM Support Deprecated**: Migrate your LiteLLM config.yaml to a self-hosted LiteLLM instance. LiteLLM can still be added via OpenAI Connections. Download the LiteLLM config.yaml from admin settings > database > export LiteLLM config.yaml. + +## [0.1.125] - 2024-05-19 + +### Added + +- **🔄 Updated UI**: Chat interface revamped with chat bubbles. Easily switch back to the old style via settings > interface > chat bubble UI. +- **📂 Enhanced Sidebar UI**: Model files, documents, prompts, and playground merged into Workspace for streamlined access. +- **🚀 Improved Many Model Interaction**: All responses now displayed simultaneously for a smoother experience. +- **🐍 Python Code Execution**: Execute Python code locally in the browser with libraries like 'requests', 'beautifulsoup4', 'numpy', 'pandas', 'seaborn', 'matplotlib', 'scikit-learn', 'scipy', 'regex'. +- **🧠 Experimental Memory Feature**: Manually input personal information you want LLMs to remember via settings > personalization > memory. +- **💾 Persistent Settings**: Settings now saved as config.json for convenience. +- **🩺 Health Check Endpoint**: Added for Docker deployment. +- **↕️ RTL Support**: Toggle chat direction via settings > interface > chat direction. +- **🖥️ PowerPoint Support**: RAG pipeline now supports PowerPoint documents. +- **🌐 Language Updates**: Ukrainian, Turkish, Arabic, Chinese, Serbian, Vietnamese updated; Punjabi added. + +### Changed + +- **👤 Shared Chat Update**: Shared chat now includes creator user information. + +## [0.1.124] - 2024-05-08 + +### Added + +- **🖼️ Improved Chat Sidebar**: Now conveniently displays time ranges and organizes chats by today, yesterday, and more. +- **📜 Citations in RAG Feature**: Easily track the context fed to the LLM with added citations in the RAG feature. +- **🔒 Auth Disable Option**: Introducing the ability to disable authentication. Set 'WEBUI_AUTH' to False to disable authentication. Note: Only applicable for fresh installations without existing users. +- **📹 Enhanced YouTube RAG Pipeline**: Now supports non-English videos for an enriched experience. +- **🔊 Specify OpenAI TTS Models**: Customize your TTS experience by specifying OpenAI TTS models. +- **🔧 Additional Environment Variables**: Discover more environment variables in our comprehensive documentation at Open WebUI Documentation (https://docs.openwebui.com). +- **🌐 Language Support**: Arabic, Finnish, and Hindi added; Improved support for German, Vietnamese, and Chinese. + +### Fixed + +- **🛠️ Model Selector Styling**: Addressed styling issues for improved user experience. +- **⚠️ Warning Messages**: Resolved backend warning messages. + +### Changed + +- **📝 Title Generation**: Limited output to 50 tokens. +- **📦 Helm Charts**: Removed Helm charts, now available in a separate repository (https://github.com/open-webui/helm-charts). 
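As an aside on the Docker health check endpoint added in 0.1.125 above, a minimal probe along these lines (mirroring the `HEALTHCHECK` defined in the project's Dockerfile further below) can confirm that a running instance is healthy; the host and port assume the default `PORT=8080`.

```bash
# Minimal liveness check against the /health endpoint.
# Assumes the backend listens on localhost:8080 (the Dockerfile default).
curl --silent --fail http://localhost:8080/health | jq -e '.status == true' \
  || echo "Open WebUI health check failed"
```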
+ +## [0.1.123] - 2024-05-02 + +### Added + +- **🎨 New Landing Page Design**: Refreshed design for a more modern look and optimized use of screen space. +- **📹 Youtube RAG Pipeline**: Introduces dedicated RAG pipeline for Youtube videos, enabling interaction with video transcriptions directly. +- **🔧 Enhanced Admin Panel**: Streamlined user management with options to add users directly or in bulk via CSV import. +- **👥 '@' Model Integration**: Easily switch to specific models during conversations; old collaborative chat feature phased out. +- **🌐 Language Enhancements**: Swedish translation added, plus improvements to German, Spanish, and the addition of Doge translation. + +### Fixed + +- **🗑️ Delete Chat Shortcut**: Addressed issue where shortcut wasn't functioning. +- **🖼️ Modal Closing Bug**: Resolved unexpected closure of modal when dragging from within. +- **✏️ Edit Button Styling**: Fixed styling inconsistency with edit buttons. +- **🌐 Image Generation Compatibility Issue**: Rectified image generation compatibility issue with third-party APIs. +- **📱 iOS PWA Icon Fix**: Corrected iOS PWA home screen icon shape. +- **🔍 Scroll Gesture Bug**: Adjusted gesture sensitivity to prevent accidental activation when scrolling through code on mobile; now requires scrolling from the leftmost side to open the sidebar. + +### Changed + +- **🔄 Unlimited Context Length**: Advanced settings now allow unlimited max context length (previously limited to 16000). +- **👑 Super Admin Assignment**: The first signup is automatically assigned a super admin role, unchangeable by other admins. +- **🛡️ Admin User Restrictions**: User action buttons from the admin panel are now disabled for users with admin roles. +- **🔝 Default Model Selector**: Set as default model option now exclusively available on the landing page. + +## [0.1.122] - 2024-04-27 + +### Added + +- **🌟 Enhanced RAG Pipeline**: Now with hybrid searching via 'BM25', reranking powered by 'CrossEncoder', and configurable relevance score thresholds. +- **🛢️ External Database Support**: Seamlessly connect to custom SQLite or Postgres databases using the 'DATABASE_URL' environment variable. +- **🌐 Remote ChromaDB Support**: Introducing the capability to connect to remote ChromaDB servers. +- **👨‍💼 Improved Admin Panel**: Admins can now conveniently check users' chat lists and last active status directly from the admin panel. +- **🎨 Splash Screen**: Introducing a loading splash screen for a smoother user experience. +- **🌍 Language Support Expansion**: Added support for Bangla (bn-BD), along with enhancements to Chinese, Spanish, and Ukrainian translations. +- **💻 Improved LaTeX Rendering Performance**: Enjoy faster rendering times for LaTeX equations. +- **🔧 More Environment Variables**: Explore additional environment variables in our documentation (https://docs.openwebui.com), including the 'ENABLE_LITELLM' option to manage memory usage. + +### Fixed + +- **🔧 Ollama Compatibility**: Resolved errors occurring when Ollama server version isn't an integer, such as SHA builds or RCs. +- **🐛 Various OpenAI API Issues**: Addressed several issues related to the OpenAI API. +- **🛑 Stop Sequence Issue**: Fixed the problem where the stop sequence with a backslash '\' was not functioning. +- **🔤 Font Fallback**: Corrected font fallback issue. + +### Changed + +- **⌨️ Prompt Input Behavior on Mobile**: Enter key prompt submission disabled on mobile devices for improved user experience. 
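To make the 'DATABASE_URL' option from 0.1.122 above more concrete, a hypothetical Postgres configuration is sketched below; every credential, host, and database name is a placeholder, and the exact URL scheme should be verified against the official documentation.

```bash
# Placeholder values only; verify the URL format in the Open WebUI docs.
# When DATABASE_URL is unset, the bundled SQLite database (webui.db) is used.
export DATABASE_URL="postgresql://webui_user:change_me@db.example.com:5432/openwebui"
```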
+ +## [0.1.121] - 2024-04-24 + +### Fixed + +- **🔧 Translation Issues**: Addressed various translation discrepancies. +- **🔒 LiteLLM Security Fix**: Updated LiteLLM version to resolve a security vulnerability. +- **🖥️ HTML Tag Display**: Rectified the issue where the '< br >' tag wasn't displaying correctly. +- **🔗 WebSocket Connection**: Resolved the failure of WebSocket connection under HTTPS security for ComfyUI server. +- **📜 FileReader Optimization**: Implemented FileReader initialization per image in multi-file drag & drop to ensure reusability. +- **🏷️ Tag Display**: Corrected tag display inconsistencies. +- **📦 Archived Chat Styling**: Fixed styling issues in archived chat. +- **🔖 Safari Copy Button Bug**: Addressed the bug where the copy button failed to copy links in Safari. + +## [0.1.120] - 2024-04-20 + +### Added + +- **📦 Archive Chat Feature**: Easily archive chats with a new sidebar button, and access archived chats via the profile button > archived chats. +- **🔊 Configurable Text-to-Speech Endpoint**: Customize your Text-to-Speech experience with configurable OpenAI endpoints. +- **🛠️ Improved Error Handling**: Enhanced error message handling for connection failures. +- **⌨️ Enhanced Shortcut**: When editing messages, use ctrl/cmd+enter to save and submit, and esc to close. +- **🌐 Language Support**: Added support for Georgian and enhanced translations for Portuguese and Vietnamese. + +### Fixed + +- **🔧 Model Selector**: Resolved issue where default model selection was not saving. +- **🔗 Share Link Copy Button**: Fixed bug where the copy button wasn't copying links in Safari. +- **🎨 Light Theme Styling**: Addressed styling issue with the light theme. + +## [0.1.119] - 2024-04-16 + +### Added + +- **🌟 Enhanced RAG Embedding Support**: Ollama, and OpenAI models can now be used for RAG embedding model. +- **🔄 Seamless Integration**: Copy 'ollama run ' directly from Ollama page to easily select and pull models. +- **🏷️ Tagging Feature**: Add tags to chats directly via the sidebar chat menu. +- **📱 Mobile Accessibility**: Swipe left and right on mobile to effortlessly open and close the sidebar. +- **🔍 Improved Navigation**: Admin panel now supports pagination for user list. +- **🌍 Additional Language Support**: Added Polish language support. + +### Fixed + +- **🌍 Language Enhancements**: Vietnamese and Spanish translations have been improved. +- **🔧 Helm Fixes**: Resolved issues with Helm trailing slash and manifest.json. + +### Changed + +- **🐳 Docker Optimization**: Updated docker image build process to utilize 'uv' for significantly faster builds compared to 'pip3'. + +## [0.1.118] - 2024-04-10 + +### Added + +- **🦙 Ollama and CUDA Images**: Added support for ':ollama' and ':cuda' tagged images. +- **👍 Enhanced Response Rating**: Now you can annotate your ratings for better feedback. +- **👤 User Initials Profile Photo**: User initials are now the default profile photo. +- **🔍 Update RAG Embedding Model**: Customize RAG embedding model directly in document settings. +- **🌍 Additional Language Support**: Added Turkish language support. + +### Fixed + +- **🔒 Share Chat Permission**: Resolved issue with chat sharing permissions. +- **🛠 Modal Close**: Modals can now be closed using the Esc key. + +### Changed + +- **🎨 Admin Panel Styling**: Refreshed styling for the admin panel. +- **🐳 Docker Image Build**: Updated docker image build process for improved efficiency. 
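For the ':ollama' and ':cuda' tagged images introduced in 0.1.118 above, pulling a specific variant would look roughly like the following; the registry path matches the images referenced in the README later in this repository, but tag availability should be confirmed there.

```bash
# Pull the CUDA-accelerated variant (tag names per the 0.1.118 entry above).
docker pull ghcr.io/open-webui/open-webui:cuda
# Pull the variant that bundles Ollama.
docker pull ghcr.io/open-webui/open-webui:ollama
```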
+ +## [0.1.117] - 2024-04-03 + +### Added + +- 🗨️ **Local Chat Sharing**: Share chat links seamlessly between users. +- 🔑 **API Key Generation Support**: Generate secret keys to leverage Open WebUI with OpenAI libraries. +- 📄 **Chat Download as PDF**: Easily download chats in PDF format. +- 📝 **Improved Logging**: Enhancements to logging functionality. +- 📧 **Trusted Email Authentication**: Authenticate using a trusted email header. + +### Fixed + +- 🌷 **Enhanced Dutch Translation**: Improved translation for Dutch users. +- ⚪ **White Theme Styling**: Resolved styling issue with the white theme. +- 📜 **LaTeX Chat Screen Overflow**: Fixed screen overflow issue with LaTeX rendering. +- 🔒 **Security Patches**: Applied necessary security patches. + +## [0.1.116] - 2024-03-31 + +### Added + +- **🔄 Enhanced UI**: Model selector now conveniently located in the navbar, enabling seamless switching between multiple models during conversations. +- **🔍 Improved Model Selector**: Directly pull a model from the selector/Models now display detailed information for better understanding. +- **💬 Webhook Support**: Now compatible with Google Chat and Microsoft Teams. +- **🌐 Localization**: Korean translation (I18n) now available. +- **🌑 Dark Theme**: OLED dark theme introduced for reduced strain during prolonged usage. +- **🏷️ Tag Autocomplete**: Dropdown feature added for effortless chat tagging. + +### Fixed + +- **🔽 Auto-Scrolling**: Addressed OpenAI auto-scrolling issue. +- **🏷️ Tag Validation**: Implemented tag validation to prevent empty string tags. +- **🚫 Model Whitelisting**: Resolved LiteLLM model whitelisting issue. +- **✅ Spelling**: Corrected various spelling issues for improved readability. + +## [0.1.115] - 2024-03-24 + +### Added + +- **🔍 Custom Model Selector**: Easily find and select custom models with the new search filter feature. +- **🛑 Cancel Model Download**: Added the ability to cancel model downloads. +- **🎨 Image Generation ComfyUI**: Image generation now supports ComfyUI. +- **🌟 Updated Light Theme**: Updated the light theme for a fresh look. +- **🌍 Additional Language Support**: Now supporting Bulgarian, Italian, Portuguese, Japanese, and Dutch. + +### Fixed + +- **🔧 Fixed Broken Experimental GGUF Upload**: Resolved issues with experimental GGUF upload functionality. + +### Changed + +- **🔄 Vector Storage Reset Button**: Moved the reset vector storage button to document settings. + +## [0.1.114] - 2024-03-20 + +### Added + +- **🔗 Webhook Integration**: Now you can subscribe to new user sign-up events via webhook. Simply navigate to the admin panel > admin settings > webhook URL. +- **🛡️ Enhanced Model Filtering**: Alongside Ollama, OpenAI proxy model whitelisting, we've added model filtering functionality for LiteLLM proxy. +- **🌍 Expanded Language Support**: Spanish, Catalan, and Vietnamese languages are now available, with improvements made to others. + +### Fixed + +- **🔧 Input Field Spelling**: Resolved issue with spelling mistakes in input fields. +- **🖊️ Light Mode Styling**: Fixed styling issue with light mode in document adding. + +### Changed + +- **🔄 Language Sorting**: Languages are now sorted alphabetically by their code for improved organization. + +## [0.1.113] - 2024-03-18 + +### Added + +- 🌍 **Localization**: You can now change the UI language in Settings > General. We support Ukrainian, German, Farsi (Persian), Traditional and Simplified Chinese and French translations. You can help us to translate the UI into your language! 
More info in our [CONTRIBUTION.md](https://github.com/open-webui/open-webui/blob/main/docs/CONTRIBUTING.md#-translations-and-internationalization). +- 🎨 **System-wide Theme**: Introducing a new system-wide theme for enhanced visual experience. + +### Fixed + +- 🌑 **Dark Background on Select Fields**: Improved readability by adding a dark background to select fields, addressing issues on certain browsers/devices. +- **Multiple OPENAI_API_BASE_URLS Issue**: Resolved issue where multiple base URLs caused conflicts when one wasn't functioning. +- **RAG Encoding Issue**: Fixed encoding problem in RAG. +- **npm Audit Fix**: Addressed npm audit findings. +- **Reduced Scroll Threshold**: Improved auto-scroll experience by reducing the scroll threshold from 50px to 5px. + +### Changed + +- 🔄 **Sidebar UI Update**: Updated sidebar UI to feature a chat menu dropdown, replacing two icons for improved navigation. + +## [0.1.112] - 2024-03-15 + +### Fixed + +- 🗨️ Resolved chat malfunction after image generation. +- 🎨 Fixed various RAG issues. +- 🧪 Rectified experimental broken GGUF upload logic. + +## [0.1.111] - 2024-03-10 + +### Added + +- 🛡️ **Model Whitelisting**: Admins now have the ability to whitelist models for users with the 'user' role. +- 🔄 **Update All Models**: Added a convenient button to update all models at once. +- 📄 **Toggle PDF OCR**: Users can now toggle PDF OCR option for improved parsing performance. +- 🎨 **DALL-E Integration**: Introduced DALL-E integration for image generation alongside automatic1111. +- 🛠️ **RAG API Refactoring**: Refactored RAG logic and exposed its API, with additional documentation to follow. + +### Fixed + +- 🔒 **Max Token Settings**: Added max token settings for anthropic/claude-3-sonnet-20240229 (Issue #1094). +- 🔧 **Misalignment Issue**: Corrected misalignment of Edit and Delete Icons when Chat Title is Empty (Issue #1104). +- 🔄 **Context Loss Fix**: Resolved RAG losing context on model response regeneration with Groq models via API key (Issue #1105). +- 📁 **File Handling Bug**: Addressed File Not Found Notification when Dropping a Conversation Element (Issue #1098). +- 🖱️ **Dragged File Styling**: Fixed dragged file layover styling issue. + +## [0.1.110] - 2024-03-06 + +### Added + +- **🌐 Multiple OpenAI Servers Support**: Enjoy seamless integration with multiple OpenAI-compatible APIs, now supported natively. + +### Fixed + +- **🔍 OCR Issue**: Resolved PDF parsing issue caused by OCR malfunction. +- **🚫 RAG Issue**: Fixed the RAG functionality, ensuring it operates smoothly. +- **📄 "Add Docs" Model Button**: Addressed the non-functional behavior of the "Add Docs" model button. + +## [0.1.109] - 2024-03-06 + +### Added + +- **🔄 Multiple Ollama Servers Support**: Enjoy enhanced scalability and performance with support for multiple Ollama servers in a single WebUI. Load balancing features are now available, providing improved efficiency (#788, #278). +- **🔧 Support for Claude 3 and Gemini**: Responding to user requests, we've expanded our toolset to include Claude 3 and Gemini, offering a wider range of functionalities within our platform (#1064). +- **🔍 OCR Functionality for PDF Loader**: We've augmented our PDF loader with Optical Character Recognition (OCR) capabilities. Now, extract text from scanned documents and images within PDFs, broadening the scope of content processing (#1050). + +### Fixed + +- **🛠️ RAG Collection**: Implemented a dynamic mechanism to recreate RAG collections, ensuring users have up-to-date and accurate data (#1031). 
+- **📝 User Agent Headers**: Fixed issue of RAG web requests being sent with empty user_agent headers, reducing rejections from certain websites. Realistic headers are now utilized for these requests (#1024). +- **⏹️ Playground Cancel Functionality**: Introducing a new "Cancel" option for stopping Ollama generation in the Playground, enhancing user control and usability (#1006). +- **🔤 Typographical Error in 'ASSISTANT' Field**: Corrected a typographical error in the 'ASSISTANT' field within the GGUF model upload template for accuracy and consistency (#1061). + +### Changed + +- **🔄 Refactored Message Deletion Logic**: Streamlined message deletion process for improved efficiency and user experience, simplifying interactions within the platform (#1004). +- **⚠️ Deprecation of `OLLAMA_API_BASE_URL`**: Deprecated `OLLAMA_API_BASE_URL` environment variable; recommend using `OLLAMA_BASE_URL` instead. Refer to our documentation for further details. + +## [0.1.108] - 2024-03-02 + +### Added + +- **🎮 Playground Feature (Beta)**: Explore the full potential of the raw API through an intuitive UI with our new playground feature, accessible to admins. Simply click on the bottom name area of the sidebar to access it. The playground feature offers two modes text completion (notebook) and chat completion. As it's in beta, please report any issues you encounter. +- **🛠️ Direct Database Download for Admins**: Admins can now download the database directly from the WebUI via the admin settings. +- **🎨 Additional RAG Settings**: Customize your RAG process with the ability to edit the TOP K value. Navigate to Documents > Settings > General to make changes. +- **🖥️ UI Improvements**: Tooltips now available in the input area and sidebar handle. More tooltips will be added across other parts of the UI. + +### Fixed + +- Resolved input autofocus issue on mobile when the sidebar is open, making it easier to use. +- Corrected numbered list display issue in Safari (#963). +- Restricted user ability to delete chats without proper permissions (#993). + +### Changed + +- **Simplified Ollama Settings**: Ollama settings now don't require the `/api` suffix. You can now utilize the Ollama base URL directly, e.g., `http://localhost:11434`. Also, an `OLLAMA_BASE_URL` environment variable has been added. +- **Database Renaming**: Starting from this release, `ollama.db` will be automatically renamed to `webui.db`. + +## [0.1.107] - 2024-03-01 + +### Added + +- **🚀 Makefile and LLM Update Script**: Included Makefile and a script for LLM updates in the repository. + +### Fixed + +- Corrected issue where links in the settings modal didn't appear clickable (#960). +- Fixed problem with web UI port not taking effect due to incorrect environment variable name in run-compose.sh (#996). +- Enhanced user experience by displaying chat in browser title and enabling automatic scrolling to the bottom (#992). + +### Changed + +- Upgraded toast library from `svelte-french-toast` to `svelte-sonner` for a more polished UI. +- Enhanced accessibility with the addition of dark mode on the authentication page. + +## [0.1.106] - 2024-02-27 + +### Added + +- **🎯 Auto-focus Feature**: The input area now automatically focuses when initiating or opening a chat conversation. + +### Fixed + +- Corrected typo from "HuggingFace" to "Hugging Face" (Issue #924). +- Resolved bug causing errors in chat completion API calls to OpenAI due to missing "num_ctx" parameter (Issue #927). 
+- Fixed issues preventing text editing, selection, and cursor retention in the input field (Issue #940). +- Fixed a bug where defining an OpenAI-compatible API server using 'OPENAI_API_BASE_URL' containing 'openai' string resulted in hiding models not containing 'gpt' string from the model menu. (Issue #930) + +## [0.1.105] - 2024-02-25 + +### Added + +- **📄 Document Selection**: Now you can select and delete multiple documents at once for easier management. + +### Changed + +- **🏷️ Document Pre-tagging**: Simply click the "+" button at the top, enter tag names in the popup window, or select from a list of existing tags. Then, upload files with the added tags for streamlined organization. + +## [0.1.104] - 2024-02-25 + +### Added + +- **🔄 Check for Updates**: Keep your system current by checking for updates conveniently located in Settings > About. +- **🗑️ Automatic Tag Deletion**: Unused tags on the sidebar will now be deleted automatically with just a click. + +### Changed + +- **🎨 Modernized Styling**: Enjoy a refreshed look with updated styling for a more contemporary experience. + +## [0.1.103] - 2024-02-25 + +### Added + +- **🔗 Built-in LiteLLM Proxy**: Now includes LiteLLM proxy within Open WebUI for enhanced functionality. + + - Easily integrate existing LiteLLM configurations using `-v /path/to/config.yaml:/app/backend/data/litellm/config.yaml` flag. + - When utilizing Docker container to run Open WebUI, ensure connections to localhost use `host.docker.internal`. + +- **🖼️ Image Generation Enhancements**: Introducing Advanced Settings with Image Preview Feature. + - Customize image generation by setting the number of steps; defaults to A1111 value. + +### Fixed + +- Resolved issue with RAG scan halting document loading upon encountering unsupported MIME types or exceptions (Issue #866). + +### Changed + +- Ollama is no longer required to run Open WebUI. +- Access our comprehensive documentation at [Open WebUI Documentation](https://docs.openwebui.com/). + +## [0.1.102] - 2024-02-22 + +### Added + +- **🖼️ Image Generation**: Generate Images using the AUTOMATIC1111/stable-diffusion-webui API. You can set this up in Settings > Images. +- **📝 Change title generation prompt**: Change the prompt used to generate titles for your chats. You can set this up in the Settings > Interface. +- **🤖 Change embedding model**: Change the embedding model used to generate embeddings for your chats in the Dockerfile. Use any sentence transformer model from huggingface.co. +- **📢 CHANGELOG.md/Popup**: This popup will show you the latest changes. + +## [0.1.101] - 2024-02-22 + +### Fixed + +- LaTex output formatting issue (#828) + +### Changed + +- Instead of having the previous 1.0.0-alpha.101, we switched to semantic versioning as a way to respect global conventions. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..37ac5263cbfdbb5a938da361397352443e57dc33 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,77 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. 
+ +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contribute to a positive environment for our community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or advances of any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, without their explicit permission +- **Spamming of any kind** +- Aggressive sales tactics targeting our community members are strictly prohibited. You can mention your product if it's relevant to the discussion, but under no circumstances should you push it forcefully +- Other conduct which could reasonably be considered inappropriate in a professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all community spaces and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, spamming, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at hello@openwebui.com. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Temporary Ban + +**Community Impact**: Any violation of community standards, including but not limited to inappropriate language, unprofessional behavior, harassment, or spamming. + +**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. + +### 2. Permanent Ban + +**Community Impact**: Repeated or severe violations of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the community. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/Caddyfile.localhost b/Caddyfile.localhost new file mode 100644 index 0000000000000000000000000000000000000000..80728eedf6ac60dc6454ef6933b490eeed4648be --- /dev/null +++ b/Caddyfile.localhost @@ -0,0 +1,64 @@ +# Run with +# caddy run --envfile ./example.env --config ./Caddyfile.localhost +# +# This is configured for +# - Automatic HTTPS (even for localhost) +# - Reverse Proxying to Ollama API Base URL (http://localhost:11434/api) +# - CORS +# - HTTP Basic Auth API Tokens (uncomment basicauth section) + + +# CORS Preflight (OPTIONS) + Request (GET, POST, PATCH, PUT, DELETE) +(cors-api) { + @match-cors-api-preflight method OPTIONS + handle @match-cors-api-preflight { + header { + Access-Control-Allow-Origin "{http.request.header.origin}" + Access-Control-Allow-Methods "GET, POST, PUT, PATCH, DELETE, OPTIONS" + Access-Control-Allow-Headers "Origin, Accept, Authorization, Content-Type, X-Requested-With" + Access-Control-Allow-Credentials "true" + Access-Control-Max-Age "3600" + defer + } + respond "" 204 + } + + @match-cors-api-request { + not { + header Origin "{http.request.scheme}://{http.request.host}" + } + header Origin "{http.request.header.origin}" + } + handle @match-cors-api-request { + header { + Access-Control-Allow-Origin "{http.request.header.origin}" + Access-Control-Allow-Methods "GET, POST, PUT, PATCH, DELETE, OPTIONS" + Access-Control-Allow-Headers "Origin, Accept, Authorization, Content-Type, X-Requested-With" + Access-Control-Allow-Credentials "true" + Access-Control-Max-Age "3600" + defer + } + } +} + +# replace localhost with example.com or whatever +localhost { + ## HTTP Basic Auth + ## (uncomment to enable) + # basicauth { + # # see .example.env for how to generate tokens + # {env.OLLAMA_API_ID} {env.OLLAMA_API_TOKEN_DIGEST} + # } + + handle /api/* { + # Comment to disable CORS + import cors-api + + reverse_proxy localhost:11434 + } + + # Same-Origin Static Web Server + file_server { + root ./build/ + } +} diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..a217595ea864027350be67650e3af33bf06d2003 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,161 @@ +# syntax=docker/dockerfile:1 +# Initialize device type args +# use build args in the docker build commmand with --build-arg="BUILDARG=true" +ARG USE_CUDA=false +ARG USE_OLLAMA=false +# Tested with cu117 for CUDA 11 and cu121 for CUDA 12 (default) +ARG USE_CUDA_VER=cu121 +# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers +# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard +# for better performance and multilangauge support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB) +# IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! 
You need to re-embed them. +ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2 +ARG USE_RERANKING_MODEL="" +ARG BUILD_HASH=dev-build +# Override at your own risk - non-root configurations are untested +ARG UID=0 +ARG GID=0 + +######## WebUI frontend ######## +FROM --platform=$BUILDPLATFORM node:21-alpine3.19 as build +ARG BUILD_HASH + +WORKDIR /app + +COPY package.json package-lock.json ./ +RUN npm ci + +COPY . . +ENV APP_BUILD_HASH=${BUILD_HASH} +RUN npm run build + +######## WebUI backend ######## +FROM python:3.11-slim-bookworm as base + +# Use args +ARG USE_CUDA +ARG USE_OLLAMA +ARG USE_CUDA_VER +ARG USE_EMBEDDING_MODEL +ARG USE_RERANKING_MODEL +ARG UID +ARG GID + +## Basis ## +ENV ENV=prod \ + PORT=8080 \ + # pass build args to the build + USE_OLLAMA_DOCKER=${USE_OLLAMA} \ + USE_CUDA_DOCKER=${USE_CUDA} \ + USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \ + USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \ + USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} + +## Basis URL Config ## +ENV OLLAMA_BASE_URL="/ollama" \ + OPENAI_API_BASE_URL="" + +## API Key and Security Config ## +ENV OPENAI_API_KEY="" \ + WEBUI_SECRET_KEY="" \ + SCARF_NO_ANALYTICS=true \ + DO_NOT_TRACK=true \ + ANONYMIZED_TELEMETRY=false + +#### Other models ######################################################### +## whisper TTS model settings ## +ENV WHISPER_MODEL="base" \ + WHISPER_MODEL_DIR="/app/backend/data/cache/whisper/models" + +## RAG Embedding model settings ## +ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \ + RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \ + SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models" + +## Hugging Face download cache ## +ENV HF_HOME="/app/backend/data/cache/embedding/models" +#### Other models ########################################################## + +WORKDIR /app/backend + +ENV HOME /root +# Create user and group if not root +RUN if [ $UID -ne 0 ]; then \ + if [ $GID -ne 0 ]; then \ + addgroup --gid $GID app; \ + fi; \ + adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \ + fi + +RUN mkdir -p $HOME/.cache/chroma +RUN echo -n 00000000-0000-0000-0000-000000000000 > $HOME/.cache/chroma/telemetry_user_id + +# Make sure the user has access to the app and root directory +RUN chown -R $UID:$GID /app $HOME + +RUN if [ "$USE_OLLAMA" = "true" ]; then \ + apt-get update && \ + # Install pandoc and netcat + apt-get install -y --no-install-recommends pandoc netcat-openbsd curl && \ + apt-get install -y --no-install-recommends gcc python3-dev && \ + # for RAG OCR + apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \ + # install helper tools + apt-get install -y --no-install-recommends curl jq && \ + # install ollama + curl -fsSL https://ollama.com/install.sh | sh && \ + # cleanup + rm -rf /var/lib/apt/lists/*; \ + else \ + apt-get update && \ + # Install pandoc, netcat and gcc + apt-get install -y --no-install-recommends pandoc gcc netcat-openbsd curl jq && \ + apt-get install -y --no-install-recommends gcc python3-dev && \ + # for RAG OCR + apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \ + # cleanup + rm -rf /var/lib/apt/lists/*; \ + fi + +# install python dependencies +COPY --chown=$UID:$GID ./backend/requirements.txt ./requirements.txt + +RUN pip3 install uv && \ + if [ "$USE_CUDA" = "true" ]; then \ + # If you use CUDA the whisper and embedding model will be downloaded on first use + pip3 install torch torchvision torchaudio --index-url 
https://download.pytorch.org/whl/$USE_CUDA_DOCKER_VER --no-cache-dir && \ + uv pip install --system -r requirements.txt --no-cache-dir && \ + python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \ + python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \ + else \ + pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir && \ + uv pip install --system -r requirements.txt --no-cache-dir && \ + python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \ + python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \ + fi; \ + chown -R $UID:$GID /app/backend/data/ + + + +# copy embedding weight from build +# RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2 +# COPY --from=build /app/onnx /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2/onnx + +# copy built frontend files +COPY --chown=$UID:$GID --from=build /app/build /app/build +COPY --chown=$UID:$GID --from=build /app/CHANGELOG.md /app/CHANGELOG.md +COPY --chown=$UID:$GID --from=build /app/package.json /app/package.json + +# copy backend files +COPY --chown=$UID:$GID ./backend . + +EXPOSE 8080 + +HEALTHCHECK CMD curl --silent --fail http://localhost:8080/health | jq -e '.status == true' || exit 1 + +USER $UID:$GID + +ARG BUILD_HASH +ENV WEBUI_BUILD_VERSION=${BUILD_HASH} + +CMD [ "bash", "start.sh"] diff --git a/INSTALLATION.md b/INSTALLATION.md new file mode 100644 index 0000000000000000000000000000000000000000..4298b173e9fd5c372a81c86fc4121de6a57fb81b --- /dev/null +++ b/INSTALLATION.md @@ -0,0 +1,35 @@ +### Installing Both Ollama and Open WebUI Using Kustomize + +For cpu-only pod + +```bash +kubectl apply -f ./kubernetes/manifest/base +``` + +For gpu-enabled pod + +```bash +kubectl apply -k ./kubernetes/manifest +``` + +### Installing Both Ollama and Open WebUI Using Helm + +Package Helm file first + +```bash +helm package ./kubernetes/helm/ +``` + +For cpu-only pod + +```bash +helm install ollama-webui ./ollama-webui-*.tgz +``` + +For gpu-enabled pod + +```bash +helm install ollama-webui ./ollama-webui-*.tgz --set ollama.resources.limits.nvidia.com/gpu="1" +``` + +Check the `kubernetes/helm/values.yaml` file to know which parameters are available for customization diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..515e64df6c00c937379ad187de6204770db0fe18 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Timothy Jaeryang Baek + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..4b60b049658173c1e8b64dd79919fdd04d5d4f24 --- /dev/null +++ b/Makefile @@ -0,0 +1,33 @@ + +ifneq ($(shell which docker-compose 2>/dev/null),) + DOCKER_COMPOSE := docker-compose +else + DOCKER_COMPOSE := docker compose +endif + +install: + $(DOCKER_COMPOSE) up -d + +remove: + @chmod +x confirm_remove.sh + @./confirm_remove.sh + +start: + $(DOCKER_COMPOSE) start +startAndBuild: + $(DOCKER_COMPOSE) up -d --build + +stop: + $(DOCKER_COMPOSE) stop + +update: + # Calls the LLM update script + chmod +x update_ollama_models.sh + @./update_ollama_models.sh + @git pull + $(DOCKER_COMPOSE) down + # Make sure the ollama-webui container is stopped before rebuilding + @docker stop open-webui || true + $(DOCKER_COMPOSE) up --build -d + $(DOCKER_COMPOSE) start + diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..94bc79cd6886a04e45c46650b6d10bb4be7f8672 --- /dev/null +++ b/README.md @@ -0,0 +1,223 @@ +--- +title: Open WebUI +emoji: 🐳 +colorFrom: purple +colorTo: gray +sdk: docker +app_port: 8080 +hf_oauth: true +hf_oauth_scopes: + - email +--- +--- +title: Open WebUI +emoji: 🐳 +colorFrom: purple +colorTo: gray +sdk: docker +app_port: 8080 +--- + +# Open WebUI (Formerly Ollama WebUI) 👋 + +![GitHub stars](https://img.shields.io/github/stars/open-webui/open-webui?style=social) +![GitHub forks](https://img.shields.io/github/forks/open-webui/open-webui?style=social) +![GitHub watchers](https://img.shields.io/github/watchers/open-webui/open-webui?style=social) +![GitHub repo size](https://img.shields.io/github/repo-size/open-webui/open-webui) +![GitHub language count](https://img.shields.io/github/languages/count/open-webui/open-webui) +![GitHub top language](https://img.shields.io/github/languages/top/open-webui/open-webui) +![GitHub last commit](https://img.shields.io/github/last-commit/open-webui/open-webui?color=red) +![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama-webui%2Follama-wbui&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false) +[![Discord](https://img.shields.io/badge/Discord-Open_WebUI-blue?logo=discord&logoColor=white)](https://discord.gg/5rJgQTnV4s) +[![](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/tjbck) + +Open WebUI is an [extensible](https://github.com/open-webui/pipelines), feature-rich, and user-friendly self-hosted WebUI designed to operate entirely offline. It supports various LLM runners, including Ollama and OpenAI-compatible APIs. For more information, be sure to check out our [Open WebUI Documentation](https://docs.openwebui.com/). + +![Open WebUI Demo](./demo.gif) + +## Key Features of Open WebUI ⭐ + +- 🚀 **Effortless Setup**: Install seamlessly using Docker or Kubernetes (kubectl, kustomize or helm) for a hassle-free experience with support for both `:ollama` and `:cuda` tagged images. 
+ +- 🤝 **Ollama/OpenAI API Integration**: Effortlessly integrate OpenAI-compatible APIs for versatile conversations alongside Ollama models. Customize the OpenAI API URL to link with **LMStudio, GroqCloud, Mistral, OpenRouter, and more**. + +- 🧩 **Pipelines, Open WebUI Plugin Support**: Seamlessly integrate custom logic and Python libraries into Open WebUI using [Pipelines Plugin Framework](https://github.com/open-webui/pipelines). Launch your Pipelines instance, set the OpenAI URL to the Pipelines URL, and explore endless possibilities. [Examples](https://github.com/open-webui/pipelines/tree/main/examples) include **Function Calling**, User **Rate Limiting** to control access, **Usage Monitoring** with tools like Langfuse, **Live Translation with LibreTranslate** for multilingual support, **Toxic Message Filtering** and much more. + +- 📱 **Responsive Design**: Enjoy a seamless experience across Desktop PC, Laptop, and Mobile devices. + +- 📱 **Progressive Web App (PWA) for Mobile**: Enjoy a native app-like experience on your mobile device with our PWA, providing offline access on localhost and a seamless user interface. + +- ✒️🔢 **Full Markdown and LaTeX Support**: Elevate your LLM experience with comprehensive Markdown and LaTeX capabilities for enriched interaction. + +- 🎤📹 **Hands-Free Voice/Video Call**: Experience seamless communication with integrated hands-free voice and video call features, allowing for a more dynamic and interactive chat environment. + +- 🛠️ **Model Builder**: Easily create Ollama models via the Web UI. Create and add custom characters/agents, customize chat elements, and import models effortlessly through [Open WebUI Community](https://openwebui.com/) integration. + +- 🐍 **Native Python Function Calling Tool**: Enhance your LLMs with built-in code editor support in the tools workspace. Bring Your Own Function (BYOF) by simply adding your pure Python functions, enabling seamless integration with LLMs. + +- 📚 **Local RAG Integration**: Dive into the future of chat interactions with groundbreaking Retrieval Augmented Generation (RAG) support. This feature seamlessly integrates document interactions into your chat experience. You can load documents directly into the chat or add files to your document library, effortlessly accessing them using the `#` command before a query. + +- 🔍 **Web Search for RAG**: Perform web searches using providers like `SearXNG`, `Google PSE`, `Brave Search`, `serpstack`, `serper`, `Serply`, `DuckDuckGo` and `TavilySearch` and inject the results directly into your chat experience. + +- 🌐 **Web Browsing Capability**: Seamlessly integrate websites into your chat experience using the `#` command followed by a URL. This feature allows you to incorporate web content directly into your conversations, enhancing the richness and depth of your interactions. + +- 🎨 **Image Generation Integration**: Seamlessly incorporate image generation capabilities using options such as AUTOMATIC1111 API or ComfyUI (local), and OpenAI's DALL-E (external), enriching your chat experience with dynamic visual content. + +- ⚙️ **Many Models Conversations**: Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel. + +- 🔐 **Role-Based Access Control (RBAC)**: Ensure secure access with restricted permissions; only authorized individuals can access your Ollama, and exclusive model creation/pulling rights are reserved for administrators. 
+ +- 🌐🌍 **Multilingual Support**: Experience Open WebUI in your preferred language with our internationalization (i18n) support. Join us in expanding our supported languages! We're actively seeking contributors! + +- 🌟 **Continuous Updates**: We are committed to improving Open WebUI with regular updates, fixes, and new features. + +Want to learn more about Open WebUI's features? Check out our [Open WebUI documentation](https://docs.openwebui.com/features) for a comprehensive overview! + +## 🔗 Also Check Out Open WebUI Community! + +Don't forget to explore our sibling project, [Open WebUI Community](https://openwebui.com/), where you can discover, download, and explore customized Modelfiles. Open WebUI Community offers a wide range of exciting possibilities for enhancing your chat interactions with Open WebUI! 🚀 + +## How to Install 🚀 + +> [!NOTE] +> Please note that for certain Docker environments, additional configurations might be needed. If you encounter any connection issues, our detailed guide on [Open WebUI Documentation](https://docs.openwebui.com/) is ready to assist you. + +### Quick Start with Docker 🐳 + +> [!WARNING] +> When using Docker to install Open WebUI, make sure to include the `-v open-webui:/app/backend/data` in your Docker command. This step is crucial as it ensures your database is properly mounted and prevents any loss of data. + +> [!TIP] +> If you wish to utilize Open WebUI with Ollama included or CUDA acceleration, we recommend utilizing our official images tagged with either `:cuda` or `:ollama`. To enable CUDA, you must install the [Nvidia CUDA container toolkit](https://docs.nvidia.com/dgx/nvidia-container-runtime-upgrade/) on your Linux/WSL system. + +### Installation with Default Configuration + +- **If Ollama is on your computer**, use this command: + + ```bash + docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main + ``` + +- **If Ollama is on a Different Server**, use this command: + + To connect to Ollama on another server, change the `OLLAMA_BASE_URL` to the server's URL: + + ```bash + docker run -d -p 3000:8080 -e OLLAMA_BASE_URL=https://example.com -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main + ``` + + - **To run Open WebUI with Nvidia GPU support**, use this command: + + ```bash + docker run -d -p 3000:8080 --gpus all --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:cuda + ``` + +### Installation for OpenAI API Usage Only + +- **If you're only using OpenAI API**, use this command: + + ```bash + docker run -d -p 3000:8080 -e OPENAI_API_KEY=your_secret_key -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main + ``` + +### Installing Open WebUI with Bundled Ollama Support + +This installation method uses a single container image that bundles Open WebUI with Ollama, allowing for a streamlined setup via a single command. 
Choose the appropriate command based on your hardware setup: + +- **With GPU Support**: + Utilize GPU resources by running the following command: + + ```bash + docker run -d -p 3000:8080 --gpus=all -v ollama:/root/.ollama -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:ollama + ``` + +- **For CPU Only**: + If you're not using a GPU, use this command instead: + + ```bash + docker run -d -p 3000:8080 -v ollama:/root/.ollama -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:ollama + ``` + +Both commands facilitate a built-in, hassle-free installation of both Open WebUI and Ollama, ensuring that you can get everything up and running swiftly. + +After installation, you can access Open WebUI at [http://localhost:3000](http://localhost:3000). Enjoy! 😄 + +### Other Installation Methods + +We offer various installation alternatives, including non-Docker native installation methods, Docker Compose, Kustomize, and Helm. Visit our [Open WebUI Documentation](https://docs.openwebui.com/getting-started/) or join our [Discord community](https://discord.gg/5rJgQTnV4s) for comprehensive guidance. + +### Troubleshooting + +Encountering connection issues? Our [Open WebUI Documentation](https://docs.openwebui.com/troubleshooting/) has got you covered. For further assistance and to join our vibrant community, visit the [Open WebUI Discord](https://discord.gg/5rJgQTnV4s). + +#### Open WebUI: Server Connection Error + +If you're experiencing connection issues, it’s often due to the WebUI docker container not being able to reach the Ollama server at 127.0.0.1:11434 (host.docker.internal:11434) inside the container . Use the `--network=host` flag in your docker command to resolve this. Note that the port changes from 3000 to 8080, resulting in the link: `http://localhost:8080`. + +**Example Docker Command**: + +```bash +docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=http://127.0.0.1:11434 --name open-webui --restart always ghcr.io/open-webui/open-webui:main +``` + +### Keeping Your Docker Installation Up-to-Date + +In case you want to update your local Docker installation to the latest version, you can do it with [Watchtower](https://containrrr.dev/watchtower/): + +```bash +docker run --rm --volume /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower --run-once open-webui +``` + +In the last part of the command, replace `open-webui` with your container name if it is different. + +Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/migration/). + +### Using the Dev Branch 🌙 + +> [!WARNING] +> The `:dev` branch contains the latest unstable features and changes. Use it at your own risk as it may have bugs or incomplete features. + +If you want to try out the latest bleeding-edge features and are okay with occasional instability, you can use the `:dev` tag like this: + +```bash +docker run -d -p 3000:8080 -v open-webui:/app/backend/data --name open-webui --add-host=host.docker.internal:host-gateway --restart always ghcr.io/open-webui/open-webui:dev +``` + +## What's Next? 🌟 + +Discover upcoming features on our roadmap in the [Open WebUI Documentation](https://docs.openwebui.com/roadmap/). + +## Supporters ✨ + +A big shoutout to our amazing supporters who's helping to make this project possible! 🙏 + +### Platinum Sponsors 🤍 + +- We're looking for Sponsors! + +### Acknowledgments + +Special thanks to [Prof. 
Lawrence Kim](https://www.lhkim.com/) and [Prof. Nick Vincent](https://www.nickmvincent.com/) for their invaluable support and guidance in shaping this project into a research endeavor. Grateful for your mentorship throughout the journey! 🙌 + +## License 📜 + +This project is licensed under the [MIT License](LICENSE) - see the [LICENSE](LICENSE) file for details. 📄 + +## Support 💬 + +If you have any questions, suggestions, or need assistance, please open an issue or join our +[Open WebUI Discord community](https://discord.gg/5rJgQTnV4s) to connect with us! 🤝 + +## Star History + + + + + + Star History Chart + + + +--- + +Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Open WebUI even more amazing together! 💪 diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md new file mode 100644 index 0000000000000000000000000000000000000000..9bf242381ce42f62a025445a6f33432b33b6716d --- /dev/null +++ b/TROUBLESHOOTING.md @@ -0,0 +1,36 @@ +# Open WebUI Troubleshooting Guide + +## Understanding the Open WebUI Architecture + +The Open WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues. + +- **How it Works**: The Open WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Open WebUI backend via `/ollama` route. From there, the backend is responsible for forwarding the request to the Ollama API. This forwarding is accomplished by using the route specified in the `OLLAMA_BASE_URL` environment variable. Therefore, a request made to `/ollama` in the WebUI is effectively the same as making a request to `OLLAMA_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_BASE_URL/api/tags` in the backend. + +- **Security Benefits**: This design prevents direct exposure of the Ollama API to the frontend, safeguarding against potential CORS (Cross-Origin Resource Sharing) issues and unauthorized access. Requiring authentication to access the Ollama API further enhances this security layer. + +## Open WebUI: Server Connection Error + +If you're experiencing connection issues, it’s often due to the WebUI docker container not being able to reach the Ollama server at 127.0.0.1:11434 (host.docker.internal:11434) inside the container . Use the `--network=host` flag in your docker command to resolve this. Note that the port changes from 3000 to 8080, resulting in the link: `http://localhost:8080`. + +**Example Docker Command**: + +```bash +docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=http://127.0.0.1:11434 --name open-webui --restart always ghcr.io/open-webui/open-webui:main +``` + +### Error on Slow Reponses for Ollama + +Open WebUI has a default timeout of 5 minutes for Ollama to finish generating the response. If needed, this can be adjusted via the environment variable AIOHTTP_CLIENT_TIMEOUT, which sets the timeout in seconds. + +### General Connection Errors + +**Ensure Ollama Version is Up-to-Date**: Always start by checking that you have the latest version of Ollama. Visit [Ollama's official site](https://ollama.com/) for the latest updates. + +**Troubleshooting Steps**: + +1. **Verify Ollama URL Format**: + - When running the Web UI container, ensure the `OLLAMA_BASE_URL` is correctly set. 
(e.g., `http://192.168.1.1:11434` for different host setups). + - In the Open WebUI, navigate to "Settings" > "General". + - Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]` (e.g., `http://localhost:11434`). + +By following these enhanced troubleshooting steps, connection issues should be effectively resolved. For further assistance or queries, feel free to reach out to us on our community Discord. diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..97ab32835d90e779180b09b866c7eecbdb1433ac --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,14 @@ +__pycache__ +.env +_old +uploads +.ipynb_checkpoints +*.db +_test +!/data +/data/* +!/data/litellm +/data/litellm/* +!data/litellm/config.yaml + +!data/config.json \ No newline at end of file diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..ea83b34f43e5b879df2dff7ff295e8daf11d448e --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,16 @@ +__pycache__ +.env +_old +uploads +.ipynb_checkpoints +*.db +_test +Pipfile +!/data +/data/* +!/data/litellm +/data/litellm/* +!data/litellm/config.yaml + +!data/config.json +.webui_secret_key \ No newline at end of file diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..4eff85f0c621c16f5afc80a4d92dc75da1483ac8 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,114 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 
+# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# sqlalchemy.url = REPLACE_WITH_DATABASE_URL + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/apps/audio/main.py b/backend/apps/audio/main.py new file mode 100644 index 0000000000000000000000000000000000000000..c565bf481baf8da9e6fc01bb95b0d05bf7605136 --- /dev/null +++ b/backend/apps/audio/main.py @@ -0,0 +1,437 @@ +import os +import logging +from fastapi import ( + FastAPI, + Request, + Depends, + HTTPException, + status, + UploadFile, + File, + Form, +) + +from fastapi.responses import StreamingResponse, JSONResponse, FileResponse + +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel + +import uuid +import requests +import hashlib +from pathlib import Path +import json + +from constants import ERROR_MESSAGES +from utils.utils import ( + decode_token, + get_current_user, + get_verified_user, + get_admin_user, +) +from utils.misc import calculate_sha256 + +from config import ( + SRC_LOG_LEVELS, + CACHE_DIR, + UPLOAD_DIR, + WHISPER_MODEL, + WHISPER_MODEL_DIR, + WHISPER_MODEL_AUTO_UPDATE, + DEVICE_TYPE, + AUDIO_STT_OPENAI_API_BASE_URL, + AUDIO_STT_OPENAI_API_KEY, + AUDIO_TTS_OPENAI_API_BASE_URL, + AUDIO_TTS_OPENAI_API_KEY, + AUDIO_TTS_API_KEY, + AUDIO_STT_ENGINE, + AUDIO_STT_MODEL, + AUDIO_TTS_ENGINE, + AUDIO_TTS_MODEL, + AUDIO_TTS_VOICE, + AppConfig, +) + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["AUDIO"]) + +app = FastAPI() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.state.config = AppConfig() + +app.state.config.STT_OPENAI_API_BASE_URL = AUDIO_STT_OPENAI_API_BASE_URL +app.state.config.STT_OPENAI_API_KEY = AUDIO_STT_OPENAI_API_KEY +app.state.config.STT_ENGINE = AUDIO_STT_ENGINE +app.state.config.STT_MODEL = AUDIO_STT_MODEL + +app.state.config.TTS_OPENAI_API_BASE_URL = AUDIO_TTS_OPENAI_API_BASE_URL 
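+# Like the STT values above, the TTS settings here are admin-configurable at runtime:
+# they are read back by the GET /config endpoint and updated via POST /config/update
+# further down in this file.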
+app.state.config.TTS_OPENAI_API_KEY = AUDIO_TTS_OPENAI_API_KEY +app.state.config.TTS_ENGINE = AUDIO_TTS_ENGINE +app.state.config.TTS_MODEL = AUDIO_TTS_MODEL +app.state.config.TTS_VOICE = AUDIO_TTS_VOICE +app.state.config.TTS_API_KEY = AUDIO_TTS_API_KEY + +# setting device type for whisper model +whisper_device_type = DEVICE_TYPE if DEVICE_TYPE and DEVICE_TYPE == "cuda" else "cpu" +log.info(f"whisper_device_type: {whisper_device_type}") + +SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/") +SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True) + + +class TTSConfigForm(BaseModel): + OPENAI_API_BASE_URL: str + OPENAI_API_KEY: str + API_KEY: str + ENGINE: str + MODEL: str + VOICE: str + + +class STTConfigForm(BaseModel): + OPENAI_API_BASE_URL: str + OPENAI_API_KEY: str + ENGINE: str + MODEL: str + + +class AudioConfigUpdateForm(BaseModel): + tts: TTSConfigForm + stt: STTConfigForm + + +from pydub import AudioSegment +from pydub.utils import mediainfo + + +def is_mp4_audio(file_path): + """Check if the given file is an MP4 audio file.""" + if not os.path.isfile(file_path): + print(f"File not found: {file_path}") + return False + + info = mediainfo(file_path) + if ( + info.get("codec_name") == "aac" + and info.get("codec_type") == "audio" + and info.get("codec_tag_string") == "mp4a" + ): + return True + return False + + +def convert_mp4_to_wav(file_path, output_path): + """Convert MP4 audio file to WAV format.""" + audio = AudioSegment.from_file(file_path, format="mp4") + audio.export(output_path, format="wav") + print(f"Converted {file_path} to {output_path}") + + +@app.get("/config") +async def get_audio_config(user=Depends(get_admin_user)): + return { + "tts": { + "OPENAI_API_BASE_URL": app.state.config.TTS_OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.TTS_OPENAI_API_KEY, + "API_KEY": app.state.config.TTS_API_KEY, + "ENGINE": app.state.config.TTS_ENGINE, + "MODEL": app.state.config.TTS_MODEL, + "VOICE": app.state.config.TTS_VOICE, + }, + "stt": { + "OPENAI_API_BASE_URL": app.state.config.STT_OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.STT_OPENAI_API_KEY, + "ENGINE": app.state.config.STT_ENGINE, + "MODEL": app.state.config.STT_MODEL, + }, + } + + +@app.post("/config/update") +async def update_audio_config( + form_data: AudioConfigUpdateForm, user=Depends(get_admin_user) +): + app.state.config.TTS_OPENAI_API_BASE_URL = form_data.tts.OPENAI_API_BASE_URL + app.state.config.TTS_OPENAI_API_KEY = form_data.tts.OPENAI_API_KEY + app.state.config.TTS_API_KEY = form_data.tts.API_KEY + app.state.config.TTS_ENGINE = form_data.tts.ENGINE + app.state.config.TTS_MODEL = form_data.tts.MODEL + app.state.config.TTS_VOICE = form_data.tts.VOICE + + app.state.config.STT_OPENAI_API_BASE_URL = form_data.stt.OPENAI_API_BASE_URL + app.state.config.STT_OPENAI_API_KEY = form_data.stt.OPENAI_API_KEY + app.state.config.STT_ENGINE = form_data.stt.ENGINE + app.state.config.STT_MODEL = form_data.stt.MODEL + + return { + "tts": { + "OPENAI_API_BASE_URL": app.state.config.TTS_OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.TTS_OPENAI_API_KEY, + "API_KEY": app.state.config.TTS_API_KEY, + "ENGINE": app.state.config.TTS_ENGINE, + "MODEL": app.state.config.TTS_MODEL, + "VOICE": app.state.config.TTS_VOICE, + }, + "stt": { + "OPENAI_API_BASE_URL": app.state.config.STT_OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.STT_OPENAI_API_KEY, + "ENGINE": app.state.config.STT_ENGINE, + "MODEL": app.state.config.STT_MODEL, + }, + } + + +@app.post("/speech") +async def speech(request: 
Request, user=Depends(get_verified_user)): + body = await request.body() + name = hashlib.sha256(body).hexdigest() + + file_path = SPEECH_CACHE_DIR.joinpath(f"{name}.mp3") + file_body_path = SPEECH_CACHE_DIR.joinpath(f"{name}.json") + + # Check if the file already exists in the cache + if file_path.is_file(): + return FileResponse(file_path) + + if app.state.config.TTS_ENGINE == "openai": + headers = {} + headers["Authorization"] = f"Bearer {app.state.config.TTS_OPENAI_API_KEY}" + headers["Content-Type"] = "application/json" + + try: + body = body.decode("utf-8") + body = json.loads(body) + body["model"] = app.state.config.TTS_MODEL + body = json.dumps(body).encode("utf-8") + except Exception as e: + pass + + r = None + try: + r = requests.post( + url=f"{app.state.config.TTS_OPENAI_API_BASE_URL}/audio/speech", + data=body, + headers=headers, + stream=True, + ) + + r.raise_for_status() + + # Save the streaming content to a file + with open(file_path, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + with open(file_body_path, "w") as f: + json.dump(json.loads(body.decode("utf-8")), f) + + # Return the saved file + return FileResponse(file_path) + + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"External: {res['error']['message']}" + except: + error_detail = f"External: {e}" + + raise HTTPException( + status_code=r.status_code if r != None else 500, + detail=error_detail, + ) + + elif app.state.config.TTS_ENGINE == "elevenlabs": + + payload = None + try: + payload = json.loads(body.decode("utf-8")) + except Exception as e: + log.exception(e) + pass + + url = f"https://api.elevenlabs.io/v1/text-to-speech/{payload['voice']}" + + headers = { + "Accept": "audio/mpeg", + "Content-Type": "application/json", + "xi-api-key": app.state.config.TTS_API_KEY, + } + + data = { + "text": payload["input"], + "model_id": app.state.config.TTS_MODEL, + "voice_settings": {"stability": 0.5, "similarity_boost": 0.5}, + } + + try: + r = requests.post(url, json=data, headers=headers) + + r.raise_for_status() + + # Save the streaming content to a file + with open(file_path, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + with open(file_body_path, "w") as f: + json.dump(json.loads(body.decode("utf-8")), f) + + # Return the saved file + return FileResponse(file_path) + + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"External: {res['error']['message']}" + except: + error_detail = f"External: {e}" + + raise HTTPException( + status_code=r.status_code if r != None else 500, + detail=error_detail, + ) + + +@app.post("/transcriptions") +def transcribe( + file: UploadFile = File(...), + user=Depends(get_current_user), +): + log.info(f"file.content_type: {file.content_type}") + + if file.content_type not in ["audio/mpeg", "audio/wav"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED, + ) + + try: + ext = file.filename.split(".")[-1] + + id = uuid.uuid4() + filename = f"{id}.{ext}" + + file_dir = f"{CACHE_DIR}/audio/transcriptions" + os.makedirs(file_dir, exist_ok=True) + file_path = f"{file_dir}/{filename}" + + print(filename) + + contents = file.file.read() + with open(file_path, "wb") as f: + f.write(contents) + f.close() + + if 
app.state.config.STT_ENGINE == "": + from faster_whisper import WhisperModel + + whisper_kwargs = { + "model_size_or_path": WHISPER_MODEL, + "device": whisper_device_type, + "compute_type": "int8", + "download_root": WHISPER_MODEL_DIR, + "local_files_only": not WHISPER_MODEL_AUTO_UPDATE, + } + + log.debug(f"whisper_kwargs: {whisper_kwargs}") + + try: + model = WhisperModel(**whisper_kwargs) + except: + log.warning( + "WhisperModel initialization failed, attempting download with local_files_only=False" + ) + whisper_kwargs["local_files_only"] = False + model = WhisperModel(**whisper_kwargs) + + segments, info = model.transcribe(file_path, beam_size=5) + log.info( + "Detected language '%s' with probability %f" + % (info.language, info.language_probability) + ) + + transcript = "".join([segment.text for segment in list(segments)]) + + data = {"text": transcript.strip()} + + # save the transcript to a json file + transcript_file = f"{file_dir}/{id}.json" + with open(transcript_file, "w") as f: + json.dump(data, f) + + print(data) + + return data + + elif app.state.config.STT_ENGINE == "openai": + if is_mp4_audio(file_path): + print("is_mp4_audio") + os.rename(file_path, file_path.replace(".wav", ".mp4")) + # Convert MP4 audio file to WAV format + convert_mp4_to_wav(file_path.replace(".wav", ".mp4"), file_path) + + headers = {"Authorization": f"Bearer {app.state.config.STT_OPENAI_API_KEY}"} + + files = {"file": (filename, open(file_path, "rb"))} + data = {"model": app.state.config.STT_MODEL} + + print(files, data) + + r = None + try: + r = requests.post( + url=f"{app.state.config.STT_OPENAI_API_BASE_URL}/audio/transcriptions", + headers=headers, + files=files, + data=data, + ) + + r.raise_for_status() + + data = r.json() + + # save the transcript to a json file + transcript_file = f"{file_dir}/{id}.json" + with open(transcript_file, "w") as f: + json.dump(data, f) + + print(data) + return data + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"External: {res['error']['message']}" + except: + error_detail = f"External: {e}" + + raise HTTPException( + status_code=r.status_code if r != None else 500, + detail=error_detail, + ) + + except Exception as e: + log.exception(e) + + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) diff --git a/backend/apps/images/main.py b/backend/apps/images/main.py new file mode 100644 index 0000000000000000000000000000000000000000..9ae0ad67b7e05c00cd9551f0cbda1cc9814573aa --- /dev/null +++ b/backend/apps/images/main.py @@ -0,0 +1,568 @@ +import re +import requests +import base64 +from fastapi import ( + FastAPI, + Request, + Depends, + HTTPException, + status, + UploadFile, + File, + Form, +) +from fastapi.middleware.cors import CORSMiddleware + +from constants import ERROR_MESSAGES +from utils.utils import ( + get_verified_user, + get_admin_user, +) + +from apps.images.utils.comfyui import ImageGenerationPayload, comfyui_generate_image +from utils.misc import calculate_sha256 +from typing import Optional +from pydantic import BaseModel +from pathlib import Path +import mimetypes +import uuid +import base64 +import json +import logging + +from config import ( + SRC_LOG_LEVELS, + CACHE_DIR, + IMAGE_GENERATION_ENGINE, + ENABLE_IMAGE_GENERATION, + AUTOMATIC1111_BASE_URL, + AUTOMATIC1111_API_AUTH, + COMFYUI_BASE_URL, + COMFYUI_CFG_SCALE, + COMFYUI_SAMPLER, + COMFYUI_SCHEDULER, + 
COMFYUI_SD3, + IMAGES_OPENAI_API_BASE_URL, + IMAGES_OPENAI_API_KEY, + IMAGE_GENERATION_MODEL, + IMAGE_SIZE, + IMAGE_STEPS, + AppConfig, +) + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["IMAGES"]) + +IMAGE_CACHE_DIR = Path(CACHE_DIR).joinpath("./image/generations/") +IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True) + +app = FastAPI() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.state.config = AppConfig() + +app.state.config.ENGINE = IMAGE_GENERATION_ENGINE +app.state.config.ENABLED = ENABLE_IMAGE_GENERATION + +app.state.config.OPENAI_API_BASE_URL = IMAGES_OPENAI_API_BASE_URL +app.state.config.OPENAI_API_KEY = IMAGES_OPENAI_API_KEY + +app.state.config.MODEL = IMAGE_GENERATION_MODEL + +app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL +app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH +app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL + +app.state.config.IMAGE_SIZE = IMAGE_SIZE +app.state.config.IMAGE_STEPS = IMAGE_STEPS +app.state.config.COMFYUI_CFG_SCALE = COMFYUI_CFG_SCALE +app.state.config.COMFYUI_SAMPLER = COMFYUI_SAMPLER +app.state.config.COMFYUI_SCHEDULER = COMFYUI_SCHEDULER +app.state.config.COMFYUI_SD3 = COMFYUI_SD3 + + +def get_automatic1111_api_auth(): + if app.state.config.AUTOMATIC1111_API_AUTH == None: + return "" + else: + auth1111_byte_string = app.state.config.AUTOMATIC1111_API_AUTH.encode("utf-8") + auth1111_base64_encoded_bytes = base64.b64encode(auth1111_byte_string) + auth1111_base64_encoded_string = auth1111_base64_encoded_bytes.decode("utf-8") + return f"Basic {auth1111_base64_encoded_string}" + + +@app.get("/config") +async def get_config(request: Request, user=Depends(get_admin_user)): + return { + "engine": app.state.config.ENGINE, + "enabled": app.state.config.ENABLED, + } + + +class ConfigUpdateForm(BaseModel): + engine: str + enabled: bool + + +@app.post("/config/update") +async def update_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)): + app.state.config.ENGINE = form_data.engine + app.state.config.ENABLED = form_data.enabled + return { + "engine": app.state.config.ENGINE, + "enabled": app.state.config.ENABLED, + } + + +class EngineUrlUpdateForm(BaseModel): + AUTOMATIC1111_BASE_URL: Optional[str] = None + AUTOMATIC1111_API_AUTH: Optional[str] = None + COMFYUI_BASE_URL: Optional[str] = None + + +@app.get("/url") +async def get_engine_url(user=Depends(get_admin_user)): + return { + "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL, + "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH, + "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL, + } + + +@app.post("/url/update") +async def update_engine_url( + form_data: EngineUrlUpdateForm, user=Depends(get_admin_user) +): + if form_data.AUTOMATIC1111_BASE_URL == None: + app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL + else: + url = form_data.AUTOMATIC1111_BASE_URL.strip("/") + try: + r = requests.head(url) + app.state.config.AUTOMATIC1111_BASE_URL = url + except Exception as e: + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e)) + + if form_data.COMFYUI_BASE_URL == None: + app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL + else: + url = form_data.COMFYUI_BASE_URL.strip("/") + + try: + r = requests.head(url) + app.state.config.COMFYUI_BASE_URL = url + except Exception as e: + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e)) + + if 
form_data.AUTOMATIC1111_API_AUTH == None: + app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH + else: + app.state.config.AUTOMATIC1111_API_AUTH = form_data.AUTOMATIC1111_API_AUTH + + return { + "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL, + "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH, + "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL, + "status": True, + } + + +class OpenAIConfigUpdateForm(BaseModel): + url: str + key: str + + +@app.get("/openai/config") +async def get_openai_config(user=Depends(get_admin_user)): + return { + "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY, + } + + +@app.post("/openai/config/update") +async def update_openai_config( + form_data: OpenAIConfigUpdateForm, user=Depends(get_admin_user) +): + if form_data.key == "": + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND) + + app.state.config.OPENAI_API_BASE_URL = form_data.url + app.state.config.OPENAI_API_KEY = form_data.key + + return { + "status": True, + "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL, + "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY, + } + + +class ImageSizeUpdateForm(BaseModel): + size: str + + +@app.get("/size") +async def get_image_size(user=Depends(get_admin_user)): + return {"IMAGE_SIZE": app.state.config.IMAGE_SIZE} + + +@app.post("/size/update") +async def update_image_size( + form_data: ImageSizeUpdateForm, user=Depends(get_admin_user) +): + pattern = r"^\d+x\d+$" # Regular expression pattern + if re.match(pattern, form_data.size): + app.state.config.IMAGE_SIZE = form_data.size + return { + "IMAGE_SIZE": app.state.config.IMAGE_SIZE, + "status": True, + } + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.INCORRECT_FORMAT(" (e.g., 512x512)."), + ) + + +class ImageStepsUpdateForm(BaseModel): + steps: int + + +@app.get("/steps") +async def get_image_size(user=Depends(get_admin_user)): + return {"IMAGE_STEPS": app.state.config.IMAGE_STEPS} + + +@app.post("/steps/update") +async def update_image_size( + form_data: ImageStepsUpdateForm, user=Depends(get_admin_user) +): + if form_data.steps >= 0: + app.state.config.IMAGE_STEPS = form_data.steps + return { + "IMAGE_STEPS": app.state.config.IMAGE_STEPS, + "status": True, + } + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.INCORRECT_FORMAT(" (e.g., 50)."), + ) + + +@app.get("/models") +def get_models(user=Depends(get_verified_user)): + try: + if app.state.config.ENGINE == "openai": + return [ + {"id": "dall-e-2", "name": "DALL·E 2"}, + {"id": "dall-e-3", "name": "DALL·E 3"}, + ] + elif app.state.config.ENGINE == "comfyui": + + r = requests.get(url=f"{app.state.config.COMFYUI_BASE_URL}/object_info") + info = r.json() + + return list( + map( + lambda model: {"id": model, "name": model}, + info["CheckpointLoaderSimple"]["input"]["required"]["ckpt_name"][0], + ) + ) + + else: + r = requests.get( + url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/sd-models", + headers={"authorization": get_automatic1111_api_auth()}, + ) + models = r.json() + return list( + map( + lambda model: {"id": model["title"], "name": model["model_name"]}, + models, + ) + ) + except Exception as e: + app.state.config.ENABLED = False + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e)) + + +@app.get("/models/default") +async def get_default_model(user=Depends(get_admin_user)): + try: + if app.state.config.ENGINE == "openai": + return 
{ + "model": ( + app.state.config.MODEL if app.state.config.MODEL else "dall-e-2" + ) + } + elif app.state.config.ENGINE == "comfyui": + return {"model": (app.state.config.MODEL if app.state.config.MODEL else "")} + else: + r = requests.get( + url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options", + headers={"authorization": get_automatic1111_api_auth()}, + ) + options = r.json() + return {"model": options["sd_model_checkpoint"]} + except Exception as e: + app.state.config.ENABLED = False + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e)) + + +class UpdateModelForm(BaseModel): + model: str + + +def set_model_handler(model: str): + if app.state.config.ENGINE in ["openai", "comfyui"]: + app.state.config.MODEL = model + return app.state.config.MODEL + else: + api_auth = get_automatic1111_api_auth() + r = requests.get( + url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options", + headers={"authorization": api_auth}, + ) + options = r.json() + + if model != options["sd_model_checkpoint"]: + options["sd_model_checkpoint"] = model + r = requests.post( + url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options", + json=options, + headers={"authorization": api_auth}, + ) + + return options + + +@app.post("/models/default/update") +def update_default_model( + form_data: UpdateModelForm, + user=Depends(get_verified_user), +): + return set_model_handler(form_data.model) + + +class GenerateImageForm(BaseModel): + model: Optional[str] = None + prompt: str + n: int = 1 + size: Optional[str] = None + negative_prompt: Optional[str] = None + + +def save_b64_image(b64_str): + try: + image_id = str(uuid.uuid4()) + + if "," in b64_str: + header, encoded = b64_str.split(",", 1) + mime_type = header.split(";")[0] + + img_data = base64.b64decode(encoded) + image_format = mimetypes.guess_extension(mime_type) + + image_filename = f"{image_id}{image_format}" + file_path = IMAGE_CACHE_DIR / f"{image_filename}" + with open(file_path, "wb") as f: + f.write(img_data) + return image_filename + else: + image_filename = f"{image_id}.png" + file_path = IMAGE_CACHE_DIR.joinpath(image_filename) + + img_data = base64.b64decode(b64_str) + + # Write the image data to a file + with open(file_path, "wb") as f: + f.write(img_data) + return image_filename + + except Exception as e: + log.exception(f"Error saving image: {e}") + return None + + +def save_url_image(url): + image_id = str(uuid.uuid4()) + try: + r = requests.get(url) + r.raise_for_status() + if r.headers["content-type"].split("/")[0] == "image": + + mime_type = r.headers["content-type"] + image_format = mimetypes.guess_extension(mime_type) + + if not image_format: + raise ValueError("Could not determine image type from MIME type") + + image_filename = f"{image_id}{image_format}" + + file_path = IMAGE_CACHE_DIR.joinpath(f"{image_filename}") + with open(file_path, "wb") as image_file: + for chunk in r.iter_content(chunk_size=8192): + image_file.write(chunk) + return image_filename + else: + log.error(f"Url does not point to an image.") + return None + + except Exception as e: + log.exception(f"Error saving image: {e}") + return None + + +@app.post("/generations") +async def image_generations( + form_data: GenerateImageForm, + user=Depends(get_verified_user), +): + width, height = tuple(map(int, app.state.config.IMAGE_SIZE.split("x"))) + + r = None + try: + if app.state.config.ENGINE == "openai": + + headers = {} + headers["Authorization"] = f"Bearer {app.state.config.OPENAI_API_KEY}" + headers["Content-Type"] = 
"application/json" + + data = { + "model": ( + app.state.config.MODEL + if app.state.config.MODEL != "" + else "dall-e-2" + ), + "prompt": form_data.prompt, + "n": form_data.n, + "size": ( + form_data.size if form_data.size else app.state.config.IMAGE_SIZE + ), + "response_format": "b64_json", + } + + r = requests.post( + url=f"{app.state.config.OPENAI_API_BASE_URL}/images/generations", + json=data, + headers=headers, + ) + + r.raise_for_status() + res = r.json() + + images = [] + + for image in res["data"]: + image_filename = save_b64_image(image["b64_json"]) + images.append({"url": f"/cache/image/generations/{image_filename}"}) + file_body_path = IMAGE_CACHE_DIR.joinpath(f"{image_filename}.json") + + with open(file_body_path, "w") as f: + json.dump(data, f) + + return images + + elif app.state.config.ENGINE == "comfyui": + + data = { + "prompt": form_data.prompt, + "width": width, + "height": height, + "n": form_data.n, + } + + if app.state.config.IMAGE_STEPS is not None: + data["steps"] = app.state.config.IMAGE_STEPS + + if form_data.negative_prompt is not None: + data["negative_prompt"] = form_data.negative_prompt + + if app.state.config.COMFYUI_CFG_SCALE: + data["cfg_scale"] = app.state.config.COMFYUI_CFG_SCALE + + if app.state.config.COMFYUI_SAMPLER is not None: + data["sampler"] = app.state.config.COMFYUI_SAMPLER + + if app.state.config.COMFYUI_SCHEDULER is not None: + data["scheduler"] = app.state.config.COMFYUI_SCHEDULER + + if app.state.config.COMFYUI_SD3 is not None: + data["sd3"] = app.state.config.COMFYUI_SD3 + + data = ImageGenerationPayload(**data) + + res = comfyui_generate_image( + app.state.config.MODEL, + data, + user.id, + app.state.config.COMFYUI_BASE_URL, + ) + log.debug(f"res: {res}") + + images = [] + + for image in res["data"]: + image_filename = save_url_image(image["url"]) + images.append({"url": f"/cache/image/generations/{image_filename}"}) + file_body_path = IMAGE_CACHE_DIR.joinpath(f"{image_filename}.json") + + with open(file_body_path, "w") as f: + json.dump(data.model_dump(exclude_none=True), f) + + log.debug(f"images: {images}") + return images + else: + if form_data.model: + set_model_handler(form_data.model) + + data = { + "prompt": form_data.prompt, + "batch_size": form_data.n, + "width": width, + "height": height, + } + + if app.state.config.IMAGE_STEPS is not None: + data["steps"] = app.state.config.IMAGE_STEPS + + if form_data.negative_prompt is not None: + data["negative_prompt"] = form_data.negative_prompt + + r = requests.post( + url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/txt2img", + json=data, + headers={"authorization": get_automatic1111_api_auth()}, + ) + + res = r.json() + + log.debug(f"res: {res}") + + images = [] + + for image in res["images"]: + image_filename = save_b64_image(image) + images.append({"url": f"/cache/image/generations/{image_filename}"}) + file_body_path = IMAGE_CACHE_DIR.joinpath(f"{image_filename}.json") + + with open(file_body_path, "w") as f: + json.dump({**data, "info": res["info"]}, f) + + return images + + except Exception as e: + error = e + + if r != None: + data = r.json() + if "error" in data: + error = data["error"]["message"] + raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(error)) diff --git a/backend/apps/images/utils/comfyui.py b/backend/apps/images/utils/comfyui.py new file mode 100644 index 0000000000000000000000000000000000000000..599b1f337996764668aa9fcf57e3abae0055b5ab --- /dev/null +++ b/backend/apps/images/utils/comfyui.py @@ -0,0 +1,250 @@ +import websocket # NOTE: 
websocket-client (https://github.com/websocket-client/websocket-client) +import uuid +import json +import urllib.request +import urllib.parse +import random +import logging + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["COMFYUI"]) + +from pydantic import BaseModel + +from typing import Optional + +COMFYUI_DEFAULT_PROMPT = """ +{ + "3": { + "inputs": { + "seed": 0, + "steps": 20, + "cfg": 8, + "sampler_name": "euler", + "scheduler": "normal", + "denoise": 1, + "model": [ + "4", + 0 + ], + "positive": [ + "6", + 0 + ], + "negative": [ + "7", + 0 + ], + "latent_image": [ + "5", + 0 + ] + }, + "class_type": "KSampler", + "_meta": { + "title": "KSampler" + } + }, + "4": { + "inputs": { + "ckpt_name": "model.safetensors" + }, + "class_type": "CheckpointLoaderSimple", + "_meta": { + "title": "Load Checkpoint" + } + }, + "5": { + "inputs": { + "width": 512, + "height": 512, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage", + "_meta": { + "title": "Empty Latent Image" + } + }, + "6": { + "inputs": { + "text": "Prompt", + "clip": [ + "4", + 1 + ] + }, + "class_type": "CLIPTextEncode", + "_meta": { + "title": "CLIP Text Encode (Prompt)" + } + }, + "7": { + "inputs": { + "text": "Negative Prompt", + "clip": [ + "4", + 1 + ] + }, + "class_type": "CLIPTextEncode", + "_meta": { + "title": "CLIP Text Encode (Prompt)" + } + }, + "8": { + "inputs": { + "samples": [ + "3", + 0 + ], + "vae": [ + "4", + 2 + ] + }, + "class_type": "VAEDecode", + "_meta": { + "title": "VAE Decode" + } + }, + "9": { + "inputs": { + "filename_prefix": "ComfyUI", + "images": [ + "8", + 0 + ] + }, + "class_type": "SaveImage", + "_meta": { + "title": "Save Image" + } + } +} +""" + + +def queue_prompt(prompt, client_id, base_url): + log.info("queue_prompt") + p = {"prompt": prompt, "client_id": client_id} + data = json.dumps(p).encode("utf-8") + req = urllib.request.Request(f"{base_url}/prompt", data=data) + return json.loads(urllib.request.urlopen(req).read()) + + +def get_image(filename, subfolder, folder_type, base_url): + log.info("get_image") + data = {"filename": filename, "subfolder": subfolder, "type": folder_type} + url_values = urllib.parse.urlencode(data) + with urllib.request.urlopen(f"{base_url}/view?{url_values}") as response: + return response.read() + + +def get_image_url(filename, subfolder, folder_type, base_url): + log.info("get_image") + data = {"filename": filename, "subfolder": subfolder, "type": folder_type} + url_values = urllib.parse.urlencode(data) + return f"{base_url}/view?{url_values}" + + +def get_history(prompt_id, base_url): + log.info("get_history") + with urllib.request.urlopen(f"{base_url}/history/{prompt_id}") as response: + return json.loads(response.read()) + + +def get_images(ws, prompt, client_id, base_url): + prompt_id = queue_prompt(prompt, client_id, base_url)["prompt_id"] + output_images = [] + while True: + out = ws.recv() + if isinstance(out, str): + message = json.loads(out) + if message["type"] == "executing": + data = message["data"] + if data["node"] is None and data["prompt_id"] == prompt_id: + break # Execution is done + else: + continue # previews are binary data + + history = get_history(prompt_id, base_url)[prompt_id] + for o in history["outputs"]: + for node_id in history["outputs"]: + node_output = history["outputs"][node_id] + if "images" in node_output: + for image in node_output["images"]: + url = get_image_url( + image["filename"], image["subfolder"], image["type"], base_url + ) + output_images.append({"url": 
url}) + return {"data": output_images} + + +class ImageGenerationPayload(BaseModel): + prompt: str + negative_prompt: Optional[str] = "" + steps: Optional[int] = None + seed: Optional[int] = None + width: int + height: int + n: int = 1 + cfg_scale: Optional[float] = None + sampler: Optional[str] = None + scheduler: Optional[str] = None + sd3: Optional[bool] = None + + +def comfyui_generate_image( + model: str, payload: ImageGenerationPayload, client_id, base_url +): + ws_url = base_url.replace("http://", "ws://").replace("https://", "wss://") + + comfyui_prompt = json.loads(COMFYUI_DEFAULT_PROMPT) + + if payload.cfg_scale: + comfyui_prompt["3"]["inputs"]["cfg"] = payload.cfg_scale + + if payload.sampler: + comfyui_prompt["3"]["inputs"]["sampler"] = payload.sampler + + if payload.scheduler: + comfyui_prompt["3"]["inputs"]["scheduler"] = payload.scheduler + + if payload.sd3: + comfyui_prompt["5"]["class_type"] = "EmptySD3LatentImage" + + comfyui_prompt["4"]["inputs"]["ckpt_name"] = model + comfyui_prompt["5"]["inputs"]["batch_size"] = payload.n + comfyui_prompt["5"]["inputs"]["width"] = payload.width + comfyui_prompt["5"]["inputs"]["height"] = payload.height + + # set the text prompt for our positive CLIPTextEncode + comfyui_prompt["6"]["inputs"]["text"] = payload.prompt + comfyui_prompt["7"]["inputs"]["text"] = payload.negative_prompt + + if payload.steps: + comfyui_prompt["3"]["inputs"]["steps"] = payload.steps + + comfyui_prompt["3"]["inputs"]["seed"] = ( + payload.seed if payload.seed else random.randint(0, 18446744073709551614) + ) + + try: + ws = websocket.WebSocket() + ws.connect(f"{ws_url}/ws?clientId={client_id}") + log.info("WebSocket connection established.") + except Exception as e: + log.exception(f"Failed to connect to WebSocket server: {e}") + return None + + try: + images = get_images(ws, comfyui_prompt, client_id, base_url) + except Exception as e: + log.exception(f"Error while receiving images: {e}") + images = None + + ws.close() + + return images diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py new file mode 100644 index 0000000000000000000000000000000000000000..0e4d54e465158b100d1963c27fdd2c148f646342 --- /dev/null +++ b/backend/apps/ollama/main.py @@ -0,0 +1,1280 @@ +from fastapi import ( + FastAPI, + Request, + Response, + HTTPException, + Depends, + status, + UploadFile, + File, + BackgroundTasks, +) +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import StreamingResponse +from fastapi.concurrency import run_in_threadpool + +from pydantic import BaseModel, ConfigDict + +import os +import re +import copy +import random +import requests +import json +import uuid +import aiohttp +import asyncio +import logging +import time +from urllib.parse import urlparse +from typing import Optional, List, Union + +from starlette.background import BackgroundTask + +from apps.webui.models.models import Models +from apps.webui.models.users import Users +from constants import ERROR_MESSAGES +from utils.utils import ( + decode_token, + get_current_user, + get_verified_user, + get_admin_user, +) +from utils.task import prompt_template + + +from config import ( + SRC_LOG_LEVELS, + OLLAMA_BASE_URLS, + ENABLE_OLLAMA_API, + AIOHTTP_CLIENT_TIMEOUT, + ENABLE_MODEL_FILTER, + MODEL_FILTER_LIST, + UPLOAD_DIR, + AppConfig, +) +from utils.misc import calculate_sha256, add_or_update_system_message + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["OLLAMA"]) + +app = FastAPI() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], 
+ allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.state.config = AppConfig() + +app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER +app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST + +app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API +app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS +app.state.MODELS = {} + + +# TODO: Implement a more intelligent load balancing mechanism for distributing requests among multiple backend instances. +# Current implementation uses a simple round-robin approach (random.choice). Consider incorporating algorithms like weighted round-robin, +# least connections, or least response time for better resource utilization and performance optimization. + + +@app.middleware("http") +async def check_url(request: Request, call_next): + if len(app.state.MODELS) == 0: + await get_all_models() + else: + pass + + response = await call_next(request) + return response + + +@app.head("/") +@app.get("/") +async def get_status(): + return {"status": True} + + +@app.get("/config") +async def get_config(user=Depends(get_admin_user)): + return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API} + + +class OllamaConfigForm(BaseModel): + enable_ollama_api: Optional[bool] = None + + +@app.post("/config/update") +async def update_config(form_data: OllamaConfigForm, user=Depends(get_admin_user)): + app.state.config.ENABLE_OLLAMA_API = form_data.enable_ollama_api + return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API} + + +@app.get("/urls") +async def get_ollama_api_urls(user=Depends(get_admin_user)): + return {"OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS} + + +class UrlUpdateForm(BaseModel): + urls: List[str] + + +@app.post("/urls/update") +async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)): + app.state.config.OLLAMA_BASE_URLS = form_data.urls + + log.info(f"app.state.config.OLLAMA_BASE_URLS: {app.state.config.OLLAMA_BASE_URLS}") + return {"OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS} + + +async def fetch_url(url): + timeout = aiohttp.ClientTimeout(total=5) + try: + async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session: + async with session.get(url) as response: + return await response.json() + except Exception as e: + # Handle connection error here + log.error(f"Connection error: {e}") + return None + + +async def cleanup_response( + response: Optional[aiohttp.ClientResponse], + session: Optional[aiohttp.ClientSession], +): + if response: + response.close() + if session: + await session.close() + + +async def post_streaming_url(url: str, payload: str, stream: bool = True): + r = None + try: + session = aiohttp.ClientSession( + trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT) + ) + r = await session.post(url, data=payload) + r.raise_for_status() + + if stream: + return StreamingResponse( + r.content, + status_code=r.status, + headers=dict(r.headers), + background=BackgroundTask( + cleanup_response, response=r, session=session + ), + ) + else: + res = await r.json() + await cleanup_response(r, session) + return res + + except Exception as e: + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = await r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status if r else 500, + detail=error_detail, + ) + + +def merge_models_lists(model_lists): + merged_models = {} + + for idx, 
model_list in enumerate(model_lists): + if model_list is not None: + for model in model_list: + digest = model["digest"] + if digest not in merged_models: + model["urls"] = [idx] + merged_models[digest] = model + else: + merged_models[digest]["urls"].append(idx) + + return list(merged_models.values()) + + +async def get_all_models(): + log.info("get_all_models()") + + if app.state.config.ENABLE_OLLAMA_API: + tasks = [ + fetch_url(f"{url}/api/tags") for url in app.state.config.OLLAMA_BASE_URLS + ] + responses = await asyncio.gather(*tasks) + + models = { + "models": merge_models_lists( + map( + lambda response: response["models"] if response else None, responses + ) + ) + } + + else: + models = {"models": []} + + app.state.MODELS = {model["model"]: model for model in models["models"]} + + return models + + +@app.get("/api/tags") +@app.get("/api/tags/{url_idx}") +async def get_ollama_tags( + url_idx: Optional[int] = None, user=Depends(get_verified_user) +): + if url_idx == None: + models = await get_all_models() + + if app.state.config.ENABLE_MODEL_FILTER: + if user.role == "user": + models["models"] = list( + filter( + lambda model: model["name"] + in app.state.config.MODEL_FILTER_LIST, + models["models"], + ) + ) + return models + return models + else: + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + + r = None + try: + r = requests.request(method="GET", url=f"{url}/api/tags") + r.raise_for_status() + + return r.json() + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +@app.get("/api/version") +@app.get("/api/version/{url_idx}") +async def get_ollama_versions(url_idx: Optional[int] = None): + if app.state.config.ENABLE_OLLAMA_API: + if url_idx == None: + + # returns lowest version + tasks = [ + fetch_url(f"{url}/api/version") + for url in app.state.config.OLLAMA_BASE_URLS + ] + responses = await asyncio.gather(*tasks) + responses = list(filter(lambda x: x is not None, responses)) + + if len(responses) > 0: + lowest_version = min( + responses, + key=lambda x: tuple( + map(int, re.sub(r"^v|-.*", "", x["version"]).split(".")) + ), + ) + + return {"version": lowest_version["version"]} + else: + raise HTTPException( + status_code=500, + detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND, + ) + else: + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + + r = None + try: + r = requests.request(method="GET", url=f"{url}/api/version") + r.raise_for_status() + + return r.json() + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + else: + return {"version": False} + + +class ModelNameForm(BaseModel): + name: str + + +@app.post("/api/pull") +@app.post("/api/pull/{url_idx}") +async def pull_model( + form_data: ModelNameForm, url_idx: int = 0, user=Depends(get_admin_user) +): + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + r = None + + # Admin should be able to pull models from any source + payload = {**form_data.model_dump(exclude_none=True), "insecure": True} + + return await 
post_streaming_url(f"{url}/api/pull", json.dumps(payload)) + + +class PushModelForm(BaseModel): + name: str + insecure: Optional[bool] = None + stream: Optional[bool] = None + + +@app.delete("/api/push") +@app.delete("/api/push/{url_idx}") +async def push_model( + form_data: PushModelForm, + url_idx: Optional[int] = None, + user=Depends(get_admin_user), +): + if url_idx == None: + if form_data.name in app.state.MODELS: + url_idx = app.state.MODELS[form_data.name]["urls"][0] + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.debug(f"url: {url}") + + return await post_streaming_url( + f"{url}/api/push", form_data.model_dump_json(exclude_none=True).encode() + ) + + +class CreateModelForm(BaseModel): + name: str + modelfile: Optional[str] = None + stream: Optional[bool] = None + path: Optional[str] = None + + +@app.post("/api/create") +@app.post("/api/create/{url_idx}") +async def create_model( + form_data: CreateModelForm, url_idx: int = 0, user=Depends(get_admin_user) +): + log.debug(f"form_data: {form_data}") + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + return await post_streaming_url( + f"{url}/api/create", form_data.model_dump_json(exclude_none=True).encode() + ) + + +class CopyModelForm(BaseModel): + source: str + destination: str + + +@app.post("/api/copy") +@app.post("/api/copy/{url_idx}") +async def copy_model( + form_data: CopyModelForm, + url_idx: Optional[int] = None, + user=Depends(get_admin_user), +): + if url_idx == None: + if form_data.source in app.state.MODELS: + url_idx = app.state.MODELS[form_data.source]["urls"][0] + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.source), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + try: + r = requests.request( + method="POST", + url=f"{url}/api/copy", + data=form_data.model_dump_json(exclude_none=True).encode(), + ) + r.raise_for_status() + + log.debug(f"r.text: {r.text}") + + return True + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +@app.delete("/api/delete") +@app.delete("/api/delete/{url_idx}") +async def delete_model( + form_data: ModelNameForm, + url_idx: Optional[int] = None, + user=Depends(get_admin_user), +): + if url_idx == None: + if form_data.name in app.state.MODELS: + url_idx = app.state.MODELS[form_data.name]["urls"][0] + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + try: + r = requests.request( + method="DELETE", + url=f"{url}/api/delete", + data=form_data.model_dump_json(exclude_none=True).encode(), + ) + r.raise_for_status() + + log.debug(f"r.text: {r.text}") + + return True + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + 
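# Illustrative sketch only (hypothetical helper, not used by the handlers in this
# file): the TODO near the top of this module notes that backend selection is a
# plain random.choice over the "urls" indices stored in app.state.MODELS and
# suggests smarter strategies such as weighted round-robin or least connections.
# Assuming such a selector would be handed that same list of indices, a minimal
# thread-safe round-robin variant could look like this:

import itertools
import threading
from typing import List


class RoundRobinSelector:
    # Cycles through the URL indices registered for a model instead of sampling
    # them at random, so successive requests are spread evenly across backends.
    def __init__(self):
        self._cycles = {}  # maps a tuple of URL indices -> itertools.cycle over them
        self._lock = threading.Lock()

    def pick(self, url_idxs: List[int]) -> int:
        key = tuple(url_idxs)
        with self._lock:
            if key not in self._cycles:
                self._cycles[key] = itertools.cycle(key)
            return next(self._cycles[key])


# Hypothetical usage, mirroring how random.choice is called elsewhere in this file:
#   selector = RoundRobinSelector()
#   url_idx = selector.pick(app.state.MODELS[form_data.name]["urls"])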
+@app.post("/api/show") +async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_user)): + if form_data.name not in app.state.MODELS: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name), + ) + + url_idx = random.choice(app.state.MODELS[form_data.name]["urls"]) + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + try: + r = requests.request( + method="POST", + url=f"{url}/api/show", + data=form_data.model_dump_json(exclude_none=True).encode(), + ) + r.raise_for_status() + + return r.json() + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +class GenerateEmbeddingsForm(BaseModel): + model: str + prompt: str + options: Optional[dict] = None + keep_alive: Optional[Union[int, str]] = None + + +@app.post("/api/embeddings") +@app.post("/api/embeddings/{url_idx}") +async def generate_embeddings( + form_data: GenerateEmbeddingsForm, + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + if url_idx == None: + model = form_data.model + + if ":" not in model: + model = f"{model}:latest" + + if model in app.state.MODELS: + url_idx = random.choice(app.state.MODELS[model]["urls"]) + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + try: + r = requests.request( + method="POST", + url=f"{url}/api/embeddings", + data=form_data.model_dump_json(exclude_none=True).encode(), + ) + r.raise_for_status() + + return r.json() + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +def generate_ollama_embeddings( + form_data: GenerateEmbeddingsForm, + url_idx: Optional[int] = None, +): + + log.info(f"generate_ollama_embeddings {form_data}") + + if url_idx == None: + model = form_data.model + + if ":" not in model: + model = f"{model}:latest" + + if model in app.state.MODELS: + url_idx = random.choice(app.state.MODELS[model]["urls"]) + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + try: + r = requests.request( + method="POST", + url=f"{url}/api/embeddings", + data=form_data.model_dump_json(exclude_none=True).encode(), + ) + r.raise_for_status() + + data = r.json() + + log.info(f"generate_ollama_embeddings {data}") + + if "embedding" in data: + return data["embedding"] + else: + raise "Something went wrong :/" + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise error_detail + + +class GenerateCompletionForm(BaseModel): + model: str + prompt: str + images: Optional[List[str]] = None + format: Optional[str] = None + options: Optional[dict] = 
None + system: Optional[str] = None + template: Optional[str] = None + context: Optional[str] = None + stream: Optional[bool] = True + raw: Optional[bool] = None + keep_alive: Optional[Union[int, str]] = None + + +@app.post("/api/generate") +@app.post("/api/generate/{url_idx}") +async def generate_completion( + form_data: GenerateCompletionForm, + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + + if url_idx == None: + model = form_data.model + + if ":" not in model: + model = f"{model}:latest" + + if model in app.state.MODELS: + url_idx = random.choice(app.state.MODELS[model]["urls"]) + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + return await post_streaming_url( + f"{url}/api/generate", form_data.model_dump_json(exclude_none=True).encode() + ) + + +class ChatMessage(BaseModel): + role: str + content: str + images: Optional[List[str]] = None + + +class GenerateChatCompletionForm(BaseModel): + model: str + messages: List[ChatMessage] + format: Optional[str] = None + options: Optional[dict] = None + template: Optional[str] = None + stream: Optional[bool] = None + keep_alive: Optional[Union[int, str]] = None + + +@app.post("/api/chat") +@app.post("/api/chat/{url_idx}") +async def generate_chat_completion( + form_data: GenerateChatCompletionForm, + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + + log.debug( + "form_data.model_dump_json(exclude_none=True).encode(): {0} ".format( + form_data.model_dump_json(exclude_none=True).encode() + ) + ) + + payload = { + **form_data.model_dump(exclude_none=True, exclude=["metadata"]), + } + if "metadata" in payload: + del payload["metadata"] + + model_id = form_data.model + model_info = Models.get_model_by_id(model_id) + + if model_info: + if model_info.base_model_id: + payload["model"] = model_info.base_model_id + + model_info.params = model_info.params.model_dump() + + if model_info.params: + if payload.get("options") is None: + payload["options"] = {} + + if ( + model_info.params.get("mirostat", None) + and payload["options"].get("mirostat") is None + ): + payload["options"]["mirostat"] = model_info.params.get("mirostat", None) + + if ( + model_info.params.get("mirostat_eta", None) + and payload["options"].get("mirostat_eta") is None + ): + payload["options"]["mirostat_eta"] = model_info.params.get( + "mirostat_eta", None + ) + + if ( + model_info.params.get("mirostat_tau", None) + and payload["options"].get("mirostat_tau") is None + ): + payload["options"]["mirostat_tau"] = model_info.params.get( + "mirostat_tau", None + ) + + if ( + model_info.params.get("num_ctx", None) + and payload["options"].get("num_ctx") is None + ): + payload["options"]["num_ctx"] = model_info.params.get("num_ctx", None) + + if ( + model_info.params.get("num_batch", None) + and payload["options"].get("num_batch") is None + ): + payload["options"]["num_batch"] = model_info.params.get( + "num_batch", None + ) + + if ( + model_info.params.get("num_keep", None) + and payload["options"].get("num_keep") is None + ): + payload["options"]["num_keep"] = model_info.params.get("num_keep", None) + + if ( + model_info.params.get("repeat_last_n", None) + and payload["options"].get("repeat_last_n") is None + ): + payload["options"]["repeat_last_n"] = model_info.params.get( + "repeat_last_n", None + ) + + if ( + model_info.params.get("frequency_penalty", None) + and 
payload["options"].get("frequency_penalty") is None + ): + payload["options"]["repeat_penalty"] = model_info.params.get( + "frequency_penalty", None + ) + + if ( + model_info.params.get("temperature", None) is not None + and payload["options"].get("temperature") is None + ): + payload["options"]["temperature"] = model_info.params.get( + "temperature", None + ) + + if ( + model_info.params.get("seed", None) is not None + and payload["options"].get("seed") is None + ): + payload["options"]["seed"] = model_info.params.get("seed", None) + + if ( + model_info.params.get("stop", None) + and payload["options"].get("stop") is None + ): + payload["options"]["stop"] = ( + [ + bytes(stop, "utf-8").decode("unicode_escape") + for stop in model_info.params["stop"] + ] + if model_info.params.get("stop", None) + else None + ) + + if ( + model_info.params.get("tfs_z", None) + and payload["options"].get("tfs_z") is None + ): + payload["options"]["tfs_z"] = model_info.params.get("tfs_z", None) + + if ( + model_info.params.get("max_tokens", None) + and payload["options"].get("max_tokens") is None + ): + payload["options"]["num_predict"] = model_info.params.get( + "max_tokens", None + ) + + if ( + model_info.params.get("top_k", None) + and payload["options"].get("top_k") is None + ): + payload["options"]["top_k"] = model_info.params.get("top_k", None) + + if ( + model_info.params.get("top_p", None) + and payload["options"].get("top_p") is None + ): + payload["options"]["top_p"] = model_info.params.get("top_p", None) + + if ( + model_info.params.get("use_mmap", None) + and payload["options"].get("use_mmap") is None + ): + payload["options"]["use_mmap"] = model_info.params.get("use_mmap", None) + + if ( + model_info.params.get("use_mlock", None) + and payload["options"].get("use_mlock") is None + ): + payload["options"]["use_mlock"] = model_info.params.get( + "use_mlock", None + ) + + if ( + model_info.params.get("num_thread", None) + and payload["options"].get("num_thread") is None + ): + payload["options"]["num_thread"] = model_info.params.get( + "num_thread", None + ) + + system = model_info.params.get("system", None) + if system: + system = prompt_template( + system, + **( + { + "user_name": user.name, + "user_location": ( + user.info.get("location") if user.info else None + ), + } + if user + else {} + ), + ) + + if payload.get("messages"): + payload["messages"] = add_or_update_system_message( + system, payload["messages"] + ) + + if url_idx == None: + if ":" not in payload["model"]: + payload["model"] = f"{payload['model']}:latest" + + if payload["model"] in app.state.MODELS: + url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"]) + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + log.debug(payload) + + return await post_streaming_url(f"{url}/api/chat", json.dumps(payload)) + + +# TODO: we should update this part once Ollama supports other types +class OpenAIChatMessageContent(BaseModel): + type: str + model_config = ConfigDict(extra="allow") + + +class OpenAIChatMessage(BaseModel): + role: str + content: Union[str, OpenAIChatMessageContent] + + model_config = ConfigDict(extra="allow") + + +class OpenAIChatCompletionForm(BaseModel): + model: str + messages: List[OpenAIChatMessage] + + model_config = ConfigDict(extra="allow") + + +@app.post("/v1/chat/completions") +@app.post("/v1/chat/completions/{url_idx}") +async def generate_openai_chat_completion( + 
form_data: dict, + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + form_data = OpenAIChatCompletionForm(**form_data) + payload = {**form_data.model_dump(exclude_none=True, exclude=["metadata"])} + + if "metadata" in payload: + del payload["metadata"] + + model_id = form_data.model + model_info = Models.get_model_by_id(model_id) + + if model_info: + if model_info.base_model_id: + payload["model"] = model_info.base_model_id + + model_info.params = model_info.params.model_dump() + + if model_info.params: + payload["temperature"] = model_info.params.get("temperature", None) + payload["top_p"] = model_info.params.get("top_p", None) + payload["max_tokens"] = model_info.params.get("max_tokens", None) + payload["frequency_penalty"] = model_info.params.get( + "frequency_penalty", None + ) + payload["seed"] = model_info.params.get("seed", None) + payload["stop"] = ( + [ + bytes(stop, "utf-8").decode("unicode_escape") + for stop in model_info.params["stop"] + ] + if model_info.params.get("stop", None) + else None + ) + + system = model_info.params.get("system", None) + + if system: + system = prompt_template( + system, + **( + { + "user_name": user.name, + "user_location": ( + user.info.get("location") if user.info else None + ), + } + if user + else {} + ), + ) + # Check if the payload already has a system message + # If not, add a system message to the payload + if payload.get("messages"): + for message in payload["messages"]: + if message.get("role") == "system": + message["content"] = system + message["content"] + break + else: + payload["messages"].insert( + 0, + { + "role": "system", + "content": system, + }, + ) + + if url_idx == None: + if ":" not in payload["model"]: + payload["model"] = f"{payload['model']}:latest" + + if payload["model"] in app.state.MODELS: + url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"]) + else: + raise HTTPException( + status_code=400, + detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model), + ) + + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + log.info(f"url: {url}") + + return await post_streaming_url( + f"{url}/v1/chat/completions", + json.dumps(payload), + stream=payload.get("stream", False), + ) + + +@app.get("/v1/models") +@app.get("/v1/models/{url_idx}") +async def get_openai_models( + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + if url_idx == None: + models = await get_all_models() + + if app.state.config.ENABLE_MODEL_FILTER: + if user.role == "user": + models["models"] = list( + filter( + lambda model: model["name"] + in app.state.config.MODEL_FILTER_LIST, + models["models"], + ) + ) + + return { + "data": [ + { + "id": model["model"], + "object": "model", + "created": int(time.time()), + "owned_by": "openai", + } + for model in models["models"] + ], + "object": "list", + } + + else: + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + try: + r = requests.request(method="GET", url=f"{url}/api/tags") + r.raise_for_status() + + models = r.json() + + return { + "data": [ + { + "id": model["model"], + "object": "model", + "created": int(time.time()), + "owned_by": "openai", + } + for model in models["models"] + ], + "object": "list", + } + + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +class 
UrlForm(BaseModel): + url: str + + +class UploadBlobForm(BaseModel): + filename: str + + +def parse_huggingface_url(hf_url): + try: + # Parse the URL + parsed_url = urlparse(hf_url) + + # Get the path and split it into components + path_components = parsed_url.path.split("/") + + # Extract the desired output + user_repo = "/".join(path_components[1:3]) + model_file = path_components[-1] + + return model_file + except ValueError: + return None + + +async def download_file_stream( + ollama_url, file_url, file_path, file_name, chunk_size=1024 * 1024 +): + done = False + + if os.path.exists(file_path): + current_size = os.path.getsize(file_path) + else: + current_size = 0 + + headers = {"Range": f"bytes={current_size}-"} if current_size > 0 else {} + + timeout = aiohttp.ClientTimeout(total=600) # Set the timeout + + async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session: + async with session.get(file_url, headers=headers) as response: + total_size = int(response.headers.get("content-length", 0)) + current_size + + with open(file_path, "ab+") as file: + async for data in response.content.iter_chunked(chunk_size): + current_size += len(data) + file.write(data) + + done = current_size == total_size + progress = round((current_size / total_size) * 100, 2) + + yield f'data: {{"progress": {progress}, "completed": {current_size}, "total": {total_size}}}\n\n' + + if done: + file.seek(0) + hashed = calculate_sha256(file) + file.seek(0) + + url = f"{ollama_url}/api/blobs/sha256:{hashed}" + response = requests.post(url, data=file) + + if response.ok: + res = { + "done": done, + "blob": f"sha256:{hashed}", + "name": file_name, + } + os.remove(file_path) + + yield f"data: {json.dumps(res)}\n\n" + else: + raise "Ollama: Could not create blob, Please try again." + + +# url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf" +@app.post("/models/download") +@app.post("/models/download/{url_idx}") +async def download_model( + form_data: UrlForm, + url_idx: Optional[int] = None, + user=Depends(get_admin_user), +): + + allowed_hosts = ["https://huggingface.co/", "https://github.com/"] + + if not any(form_data.url.startswith(host) for host in allowed_hosts): + raise HTTPException( + status_code=400, + detail="Invalid file_url. 
Only URLs from allowed hosts are permitted.", + ) + + if url_idx == None: + url_idx = 0 + url = app.state.config.OLLAMA_BASE_URLS[url_idx] + + file_name = parse_huggingface_url(form_data.url) + + if file_name: + file_path = f"{UPLOAD_DIR}/{file_name}" + + return StreamingResponse( + download_file_stream(url, form_data.url, file_path, file_name), + ) + else: + return None + + +@app.post("/models/upload") +@app.post("/models/upload/{url_idx}") +def upload_model( + file: UploadFile = File(...), + url_idx: Optional[int] = None, + user=Depends(get_admin_user), +): + if url_idx == None: + url_idx = 0 + ollama_url = app.state.config.OLLAMA_BASE_URLS[url_idx] + + file_path = f"{UPLOAD_DIR}/{file.filename}" + + # Save file in chunks + with open(file_path, "wb+") as f: + for chunk in file.file: + f.write(chunk) + + def file_process_stream(): + nonlocal ollama_url + total_size = os.path.getsize(file_path) + chunk_size = 1024 * 1024 + try: + with open(file_path, "rb") as f: + total = 0 + done = False + + while not done: + chunk = f.read(chunk_size) + if not chunk: + done = True + continue + + total += len(chunk) + progress = round((total / total_size) * 100, 2) + + res = { + "progress": progress, + "total": total_size, + "completed": total, + } + yield f"data: {json.dumps(res)}\n\n" + + if done: + f.seek(0) + hashed = calculate_sha256(f) + f.seek(0) + + url = f"{ollama_url}/api/blobs/sha256:{hashed}" + response = requests.post(url, data=f) + + if response.ok: + res = { + "done": done, + "blob": f"sha256:{hashed}", + "name": file.filename, + } + os.remove(file_path) + yield f"data: {json.dumps(res)}\n\n" + else: + raise Exception( + "Ollama: Could not create blob, Please try again." + ) + + except Exception as e: + res = {"error": str(e)} + yield f"data: {json.dumps(res)}\n\n" + + return StreamingResponse(file_process_stream(), media_type="text/event-stream") diff --git a/backend/apps/openai/main.py b/backend/apps/openai/main.py new file mode 100644 index 0000000000000000000000000000000000000000..c712709a5cca8e00fba54032b0956ba73efbb533 --- /dev/null +++ b/backend/apps/openai/main.py @@ -0,0 +1,579 @@ +from fastapi import FastAPI, Request, Response, HTTPException, Depends +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import StreamingResponse, JSONResponse, FileResponse + +import requests +import aiohttp +import asyncio +import json +import logging + +from pydantic import BaseModel +from starlette.background import BackgroundTask + +from apps.webui.models.models import Models +from apps.webui.models.users import Users +from constants import ERROR_MESSAGES +from utils.utils import ( + decode_token, + get_verified_user, + get_verified_user, + get_admin_user, +) +from utils.task import prompt_template +from utils.misc import add_or_update_system_message + +from config import ( + SRC_LOG_LEVELS, + ENABLE_OPENAI_API, + AIOHTTP_CLIENT_TIMEOUT, + OPENAI_API_BASE_URLS, + OPENAI_API_KEYS, + CACHE_DIR, + ENABLE_MODEL_FILTER, + MODEL_FILTER_LIST, + AppConfig, +) +from typing import List, Optional + + +import hashlib +from pathlib import Path + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["OPENAI"]) + +app = FastAPI() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +app.state.config = AppConfig() + +app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER +app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST + +app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API 
+app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS +app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS + +app.state.MODELS = {} + + +@app.middleware("http") +async def check_url(request: Request, call_next): + if len(app.state.MODELS) == 0: + await get_all_models() + else: + pass + + response = await call_next(request) + return response + + +@app.get("/config") +async def get_config(user=Depends(get_admin_user)): + return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API} + + +class OpenAIConfigForm(BaseModel): + enable_openai_api: Optional[bool] = None + + +@app.post("/config/update") +async def update_config(form_data: OpenAIConfigForm, user=Depends(get_admin_user)): + app.state.config.ENABLE_OPENAI_API = form_data.enable_openai_api + return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API} + + +class UrlsUpdateForm(BaseModel): + urls: List[str] + + +class KeysUpdateForm(BaseModel): + keys: List[str] + + +@app.get("/urls") +async def get_openai_urls(user=Depends(get_admin_user)): + return {"OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS} + + +@app.post("/urls/update") +async def update_openai_urls(form_data: UrlsUpdateForm, user=Depends(get_admin_user)): + await get_all_models() + app.state.config.OPENAI_API_BASE_URLS = form_data.urls + return {"OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS} + + +@app.get("/keys") +async def get_openai_keys(user=Depends(get_admin_user)): + return {"OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS} + + +@app.post("/keys/update") +async def update_openai_key(form_data: KeysUpdateForm, user=Depends(get_admin_user)): + app.state.config.OPENAI_API_KEYS = form_data.keys + return {"OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS} + + +@app.post("/audio/speech") +async def speech(request: Request, user=Depends(get_verified_user)): + idx = None + try: + idx = app.state.config.OPENAI_API_BASE_URLS.index("https://api.openai.com/v1") + body = await request.body() + name = hashlib.sha256(body).hexdigest() + + SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/") + SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True) + file_path = SPEECH_CACHE_DIR.joinpath(f"{name}.mp3") + file_body_path = SPEECH_CACHE_DIR.joinpath(f"{name}.json") + + # Check if the file already exists in the cache + if file_path.is_file(): + return FileResponse(file_path) + + headers = {} + headers["Authorization"] = f"Bearer {app.state.config.OPENAI_API_KEYS[idx]}" + headers["Content-Type"] = "application/json" + if "openrouter.ai" in app.state.config.OPENAI_API_BASE_URLS[idx]: + headers["HTTP-Referer"] = "https://openwebui.com/" + headers["X-Title"] = "Open WebUI" + r = None + try: + r = requests.post( + url=f"{app.state.config.OPENAI_API_BASE_URLS[idx]}/audio/speech", + data=body, + headers=headers, + stream=True, + ) + + r.raise_for_status() + + # Save the streaming content to a file + with open(file_path, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + with open(file_body_path, "w") as f: + json.dump(json.loads(body.decode("utf-8")), f) + + # Return the saved file + return FileResponse(file_path) + + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"External: {res['error']}" + except: + error_detail = f"External: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, detail=error_detail + ) + + except ValueError: + raise 
HTTPException(status_code=401, detail=ERROR_MESSAGES.OPENAI_NOT_FOUND) + + +async def fetch_url(url, key): + timeout = aiohttp.ClientTimeout(total=5) + try: + headers = {"Authorization": f"Bearer {key}"} + async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session: + async with session.get(url, headers=headers) as response: + return await response.json() + except Exception as e: + # Handle connection error here + log.error(f"Connection error: {e}") + return None + + +async def cleanup_response( + response: Optional[aiohttp.ClientResponse], + session: Optional[aiohttp.ClientSession], +): + if response: + response.close() + if session: + await session.close() + + +def merge_models_lists(model_lists): + log.debug(f"merge_models_lists {model_lists}") + merged_list = [] + + for idx, models in enumerate(model_lists): + if models is not None and "error" not in models: + merged_list.extend( + [ + { + **model, + "name": model.get("name", model["id"]), + "owned_by": "openai", + "openai": model, + "urlIdx": idx, + } + for model in models + if "api.openai.com" + not in app.state.config.OPENAI_API_BASE_URLS[idx] + or "gpt" in model["id"] + ] + ) + + return merged_list + + +async def get_all_models(raw: bool = False): + log.info("get_all_models()") + + if ( + len(app.state.config.OPENAI_API_KEYS) == 1 + and app.state.config.OPENAI_API_KEYS[0] == "" + ) or not app.state.config.ENABLE_OPENAI_API: + models = {"data": []} + else: + # Check if API KEYS length is same than API URLS length + if len(app.state.config.OPENAI_API_KEYS) != len( + app.state.config.OPENAI_API_BASE_URLS + ): + # if there are more keys than urls, remove the extra keys + if len(app.state.config.OPENAI_API_KEYS) > len( + app.state.config.OPENAI_API_BASE_URLS + ): + app.state.config.OPENAI_API_KEYS = app.state.config.OPENAI_API_KEYS[ + : len(app.state.config.OPENAI_API_BASE_URLS) + ] + # if there are more urls than keys, add empty keys + else: + app.state.config.OPENAI_API_KEYS += [ + "" + for _ in range( + len(app.state.config.OPENAI_API_BASE_URLS) + - len(app.state.config.OPENAI_API_KEYS) + ) + ] + + tasks = [ + fetch_url(f"{url}/models", app.state.config.OPENAI_API_KEYS[idx]) + for idx, url in enumerate(app.state.config.OPENAI_API_BASE_URLS) + ] + + responses = await asyncio.gather(*tasks) + log.debug(f"get_all_models:responses() {responses}") + + if raw: + return responses + + models = { + "data": merge_models_lists( + list( + map( + lambda response: ( + response["data"] + if (response and "data" in response) + else (response if isinstance(response, list) else None) + ), + responses, + ) + ) + ) + } + + log.debug(f"models: {models}") + app.state.MODELS = {model["id"]: model for model in models["data"]} + + return models + + +@app.get("/models") +@app.get("/models/{url_idx}") +async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_user)): + if url_idx == None: + models = await get_all_models() + if app.state.config.ENABLE_MODEL_FILTER: + if user.role == "user": + models["data"] = list( + filter( + lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST, + models["data"], + ) + ) + return models + return models + else: + url = app.state.config.OPENAI_API_BASE_URLS[url_idx] + key = app.state.config.OPENAI_API_KEYS[url_idx] + + headers = {} + headers["Authorization"] = f"Bearer {key}" + headers["Content-Type"] = "application/json" + + r = None + + try: + r = requests.request(method="GET", url=f"{url}/models", headers=headers) + r.raise_for_status() + + response_data = r.json() + if 
"api.openai.com" in url: + response_data["data"] = list( + filter(lambda model: "gpt" in model["id"], response_data["data"]) + ) + + return response_data + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"External: {res['error']}" + except: + error_detail = f"External: {e}" + + raise HTTPException( + status_code=r.status_code if r else 500, + detail=error_detail, + ) + + +@app.post("/chat/completions") +@app.post("/chat/completions/{url_idx}") +async def generate_chat_completion( + form_data: dict, + url_idx: Optional[int] = None, + user=Depends(get_verified_user), +): + idx = 0 + payload = {**form_data} + if "metadata" in payload: + del payload["metadata"] + + model_id = form_data.get("model") + model_info = Models.get_model_by_id(model_id) + + if model_info: + if model_info.base_model_id: + payload["model"] = model_info.base_model_id + + model_info.params = model_info.params.model_dump() + + if model_info.params: + if ( + model_info.params.get("temperature", None) is not None + and payload.get("temperature") is None + ): + payload["temperature"] = float(model_info.params.get("temperature")) + + if model_info.params.get("top_p", None) and payload.get("top_p") is None: + payload["top_p"] = int(model_info.params.get("top_p", None)) + + if ( + model_info.params.get("max_tokens", None) + and payload.get("max_tokens") is None + ): + payload["max_tokens"] = int(model_info.params.get("max_tokens", None)) + + if ( + model_info.params.get("frequency_penalty", None) + and payload.get("frequency_penalty") is None + ): + payload["frequency_penalty"] = int( + model_info.params.get("frequency_penalty", None) + ) + + if ( + model_info.params.get("seed", None) is not None + and payload.get("seed") is None + ): + payload["seed"] = model_info.params.get("seed", None) + + if model_info.params.get("stop", None) and payload.get("stop") is None: + payload["stop"] = ( + [ + bytes(stop, "utf-8").decode("unicode_escape") + for stop in model_info.params["stop"] + ] + if model_info.params.get("stop", None) + else None + ) + + system = model_info.params.get("system", None) + if system: + system = prompt_template( + system, + **( + { + "user_name": user.name, + "user_location": ( + user.info.get("location") if user.info else None + ), + } + if user + else {} + ), + ) + if payload.get("messages"): + payload["messages"] = add_or_update_system_message( + system, payload["messages"] + ) + + else: + pass + + model = app.state.MODELS[payload.get("model")] + idx = model["urlIdx"] + + if "pipeline" in model and model.get("pipeline"): + payload["user"] = { + "name": user.name, + "id": user.id, + "email": user.email, + "role": user.role, + } + + # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000 + # This is a workaround until OpenAI fixes the issue with this model + if payload.get("model") == "gpt-4-vision-preview": + if "max_tokens" not in payload: + payload["max_tokens"] = 4000 + log.debug("Modified payload:", payload) + + # Convert the modified body back to JSON + payload = json.dumps(payload) + + log.debug(payload) + + url = app.state.config.OPENAI_API_BASE_URLS[idx] + key = app.state.config.OPENAI_API_KEYS[idx] + + headers = {} + headers["Authorization"] = f"Bearer {key}" + headers["Content-Type"] = "application/json" + if "openrouter.ai" in app.state.config.OPENAI_API_BASE_URLS[idx]: + headers["HTTP-Referer"] = "https://openwebui.com/" + headers["X-Title"] = "Open 
WebUI" + + r = None + session = None + streaming = False + + try: + session = aiohttp.ClientSession( + trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT) + ) + r = await session.request( + method="POST", + url=f"{url}/chat/completions", + data=payload, + headers=headers, + ) + + r.raise_for_status() + + # Check if response is SSE + if "text/event-stream" in r.headers.get("Content-Type", ""): + streaming = True + return StreamingResponse( + r.content, + status_code=r.status, + headers=dict(r.headers), + background=BackgroundTask( + cleanup_response, response=r, session=session + ), + ) + else: + response_data = await r.json() + return response_data + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = await r.json() + print(res) + if "error" in res: + error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}" + except: + error_detail = f"External: {e}" + raise HTTPException(status_code=r.status if r else 500, detail=error_detail) + finally: + if not streaming and session: + if r: + r.close() + await session.close() + + +@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"]) +async def proxy(path: str, request: Request, user=Depends(get_verified_user)): + idx = 0 + + body = await request.body() + + url = app.state.config.OPENAI_API_BASE_URLS[idx] + key = app.state.config.OPENAI_API_KEYS[idx] + + target_url = f"{url}/{path}" + + headers = {} + headers["Authorization"] = f"Bearer {key}" + headers["Content-Type"] = "application/json" + + r = None + session = None + streaming = False + + try: + session = aiohttp.ClientSession(trust_env=True) + r = await session.request( + method=request.method, + url=target_url, + data=body, + headers=headers, + ) + + r.raise_for_status() + + # Check if response is SSE + if "text/event-stream" in r.headers.get("Content-Type", ""): + streaming = True + return StreamingResponse( + r.content, + status_code=r.status, + headers=dict(r.headers), + background=BackgroundTask( + cleanup_response, response=r, session=session + ), + ) + else: + response_data = await r.json() + return response_data + except Exception as e: + log.exception(e) + error_detail = "Open WebUI: Server Connection Error" + if r is not None: + try: + res = await r.json() + print(res) + if "error" in res: + error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}" + except: + error_detail = f"External: {e}" + raise HTTPException(status_code=r.status if r else 500, detail=error_detail) + finally: + if not streaming and session: + if r: + r.close() + await session.close() diff --git a/backend/apps/rag/main.py b/backend/apps/rag/main.py new file mode 100644 index 0000000000000000000000000000000000000000..dc6b8830efec53d1d153ecf165852a1c3820b85a --- /dev/null +++ b/backend/apps/rag/main.py @@ -0,0 +1,1463 @@ +from fastapi import ( + FastAPI, + Depends, + HTTPException, + status, + UploadFile, + File, + Form, +) +from fastapi.middleware.cors import CORSMiddleware +import requests +import os, shutil, logging, re +from datetime import datetime + +from pathlib import Path +from typing import List, Union, Sequence, Iterator, Any + +from chromadb.utils.batch_utils import create_batches +from langchain_core.documents import Document + +from langchain_community.document_loaders import ( + WebBaseLoader, + TextLoader, + PyPDFLoader, + CSVLoader, + BSHTMLLoader, + Docx2txtLoader, + UnstructuredEPubLoader, + 
UnstructuredWordDocumentLoader, + UnstructuredMarkdownLoader, + UnstructuredXMLLoader, + UnstructuredRSTLoader, + UnstructuredExcelLoader, + UnstructuredPowerPointLoader, + YoutubeLoader, + OutlookMessageLoader, +) +from langchain.text_splitter import RecursiveCharacterTextSplitter + +import validators +import urllib.parse +import socket + + +from pydantic import BaseModel +from typing import Optional +import mimetypes +import uuid +import json + +from apps.webui.models.documents import ( + Documents, + DocumentForm, + DocumentResponse, +) +from apps.webui.models.files import ( + Files, +) + +from apps.rag.utils import ( + get_model_path, + get_embedding_function, + query_doc, + query_doc_with_hybrid_search, + query_collection, + query_collection_with_hybrid_search, +) + +from apps.rag.search.brave import search_brave +from apps.rag.search.google_pse import search_google_pse +from apps.rag.search.main import SearchResult +from apps.rag.search.searxng import search_searxng +from apps.rag.search.serper import search_serper +from apps.rag.search.serpstack import search_serpstack +from apps.rag.search.serply import search_serply +from apps.rag.search.duckduckgo import search_duckduckgo +from apps.rag.search.tavily import search_tavily +from apps.rag.search.jina_search import search_jina + +from utils.misc import ( + calculate_sha256, + calculate_sha256_string, + sanitize_filename, + extract_folders_after_data_docs, +) +from utils.utils import get_verified_user, get_admin_user + +from config import ( + AppConfig, + ENV, + SRC_LOG_LEVELS, + UPLOAD_DIR, + DOCS_DIR, + CONTENT_EXTRACTION_ENGINE, + TIKA_SERVER_URL, + RAG_TOP_K, + RAG_RELEVANCE_THRESHOLD, + RAG_EMBEDDING_ENGINE, + RAG_EMBEDDING_MODEL, + RAG_EMBEDDING_MODEL_AUTO_UPDATE, + RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE, + ENABLE_RAG_HYBRID_SEARCH, + ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION, + RAG_RERANKING_MODEL, + PDF_EXTRACT_IMAGES, + RAG_RERANKING_MODEL_AUTO_UPDATE, + RAG_RERANKING_MODEL_TRUST_REMOTE_CODE, + RAG_OPENAI_API_BASE_URL, + RAG_OPENAI_API_KEY, + DEVICE_TYPE, + CHROMA_CLIENT, + CHUNK_SIZE, + CHUNK_OVERLAP, + RAG_TEMPLATE, + ENABLE_RAG_LOCAL_WEB_FETCH, + YOUTUBE_LOADER_LANGUAGE, + ENABLE_RAG_WEB_SEARCH, + RAG_WEB_SEARCH_ENGINE, + RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + SEARXNG_QUERY_URL, + GOOGLE_PSE_API_KEY, + GOOGLE_PSE_ENGINE_ID, + BRAVE_SEARCH_API_KEY, + SERPSTACK_API_KEY, + SERPSTACK_HTTPS, + SERPER_API_KEY, + SERPLY_API_KEY, + TAVILY_API_KEY, + RAG_WEB_SEARCH_RESULT_COUNT, + RAG_WEB_SEARCH_CONCURRENT_REQUESTS, + RAG_EMBEDDING_OPENAI_BATCH_SIZE, +) + +from constants import ERROR_MESSAGES + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + +app = FastAPI() + +app.state.config = AppConfig() + +app.state.config.TOP_K = RAG_TOP_K +app.state.config.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD + +app.state.config.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH +app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = ( + ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION +) + +app.state.config.CONTENT_EXTRACTION_ENGINE = CONTENT_EXTRACTION_ENGINE +app.state.config.TIKA_SERVER_URL = TIKA_SERVER_URL + +app.state.config.CHUNK_SIZE = CHUNK_SIZE +app.state.config.CHUNK_OVERLAP = CHUNK_OVERLAP + +app.state.config.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE +app.state.config.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL +app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE = RAG_EMBEDDING_OPENAI_BATCH_SIZE +app.state.config.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL +app.state.config.RAG_TEMPLATE = RAG_TEMPLATE + + 
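# Illustrative sketch only: the CHUNK_SIZE and CHUNK_OVERLAP values configured above
# are the knobs later handed to the RecursiveCharacterTextSplitter imported at the
# top of this file. A standalone example of how those two settings interact (the
# sample text and sizes below are made up for illustration; the real values come
# from app.state.config.CHUNK_SIZE and app.state.config.CHUNK_OVERLAP):

from langchain.text_splitter import RecursiveCharacterTextSplitter

_example_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1500,  # maximum characters per chunk
    chunk_overlap=100,  # characters shared between neighbouring chunks
)
_example_chunks = _example_splitter.split_text(
    "Some long document text that should be broken into overlapping chunks ..."
)
# Each entry in _example_chunks is at most ~1500 characters, and consecutive
# entries repeat up to 100 characters so context is not lost at chunk borders.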
+app.state.config.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL +app.state.config.OPENAI_API_KEY = RAG_OPENAI_API_KEY + +app.state.config.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES + + +app.state.config.YOUTUBE_LOADER_LANGUAGE = YOUTUBE_LOADER_LANGUAGE +app.state.YOUTUBE_LOADER_TRANSLATION = None + + +app.state.config.ENABLE_RAG_WEB_SEARCH = ENABLE_RAG_WEB_SEARCH +app.state.config.RAG_WEB_SEARCH_ENGINE = RAG_WEB_SEARCH_ENGINE +app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = RAG_WEB_SEARCH_DOMAIN_FILTER_LIST + +app.state.config.SEARXNG_QUERY_URL = SEARXNG_QUERY_URL +app.state.config.GOOGLE_PSE_API_KEY = GOOGLE_PSE_API_KEY +app.state.config.GOOGLE_PSE_ENGINE_ID = GOOGLE_PSE_ENGINE_ID +app.state.config.BRAVE_SEARCH_API_KEY = BRAVE_SEARCH_API_KEY +app.state.config.SERPSTACK_API_KEY = SERPSTACK_API_KEY +app.state.config.SERPSTACK_HTTPS = SERPSTACK_HTTPS +app.state.config.SERPER_API_KEY = SERPER_API_KEY +app.state.config.SERPLY_API_KEY = SERPLY_API_KEY +app.state.config.TAVILY_API_KEY = TAVILY_API_KEY +app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = RAG_WEB_SEARCH_RESULT_COUNT +app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = RAG_WEB_SEARCH_CONCURRENT_REQUESTS + + +def update_embedding_model( + embedding_model: str, + update_model: bool = False, +): + if embedding_model and app.state.config.RAG_EMBEDDING_ENGINE == "": + import sentence_transformers + + app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer( + get_model_path(embedding_model, update_model), + device=DEVICE_TYPE, + trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE, + ) + else: + app.state.sentence_transformer_ef = None + + +def update_reranking_model( + reranking_model: str, + update_model: bool = False, +): + if reranking_model: + import sentence_transformers + + app.state.sentence_transformer_rf = sentence_transformers.CrossEncoder( + get_model_path(reranking_model, update_model), + device=DEVICE_TYPE, + trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE, + ) + else: + app.state.sentence_transformer_rf = None + + +update_embedding_model( + app.state.config.RAG_EMBEDDING_MODEL, + RAG_EMBEDDING_MODEL_AUTO_UPDATE, +) + +update_reranking_model( + app.state.config.RAG_RERANKING_MODEL, + RAG_RERANKING_MODEL_AUTO_UPDATE, +) + + +app.state.EMBEDDING_FUNCTION = get_embedding_function( + app.state.config.RAG_EMBEDDING_ENGINE, + app.state.config.RAG_EMBEDDING_MODEL, + app.state.sentence_transformer_ef, + app.state.config.OPENAI_API_KEY, + app.state.config.OPENAI_API_BASE_URL, + app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE, +) + +origins = ["*"] + + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +class CollectionNameForm(BaseModel): + collection_name: Optional[str] = "test" + + +class UrlForm(CollectionNameForm): + url: str + + +class SearchForm(CollectionNameForm): + query: str + + +@app.get("/") +async def get_status(): + return { + "status": True, + "chunk_size": app.state.config.CHUNK_SIZE, + "chunk_overlap": app.state.config.CHUNK_OVERLAP, + "template": app.state.config.RAG_TEMPLATE, + "embedding_engine": app.state.config.RAG_EMBEDDING_ENGINE, + "embedding_model": app.state.config.RAG_EMBEDDING_MODEL, + "reranking_model": app.state.config.RAG_RERANKING_MODEL, + "openai_batch_size": app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE, + } + + +@app.get("/embedding") +async def get_embedding_config(user=Depends(get_admin_user)): + return { + "status": True, + "embedding_engine": 
app.state.config.RAG_EMBEDDING_ENGINE,
+        "embedding_model": app.state.config.RAG_EMBEDDING_MODEL,
+        "openai_config": {
+            "url": app.state.config.OPENAI_API_BASE_URL,
+            "key": app.state.config.OPENAI_API_KEY,
+            "batch_size": app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE,
+        },
+    }
+
+
+@app.get("/reranking")
+async def get_reranking_config(user=Depends(get_admin_user)):
+    return {
+        "status": True,
+        "reranking_model": app.state.config.RAG_RERANKING_MODEL,
+    }
+
+
+class OpenAIConfigForm(BaseModel):
+    url: str
+    key: str
+    batch_size: Optional[int] = None
+
+
+class EmbeddingModelUpdateForm(BaseModel):
+    openai_config: Optional[OpenAIConfigForm] = None
+    embedding_engine: str
+    embedding_model: str
+
+
+@app.post("/embedding/update")
+async def update_embedding_config(
+    form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
+):
+    log.info(
+        f"Updating embedding model: {app.state.config.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
+    )
+    try:
+        app.state.config.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
+        app.state.config.RAG_EMBEDDING_MODEL = form_data.embedding_model
+
+        if app.state.config.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
+            if form_data.openai_config is not None:
+                app.state.config.OPENAI_API_BASE_URL = form_data.openai_config.url
+                app.state.config.OPENAI_API_KEY = form_data.openai_config.key
+                app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE = (
+                    form_data.openai_config.batch_size
+                    if form_data.openai_config.batch_size
+                    else 1
+                )
+
+        update_embedding_model(app.state.config.RAG_EMBEDDING_MODEL)
+
+        app.state.EMBEDDING_FUNCTION = get_embedding_function(
+            app.state.config.RAG_EMBEDDING_ENGINE,
+            app.state.config.RAG_EMBEDDING_MODEL,
+            app.state.sentence_transformer_ef,
+            app.state.config.OPENAI_API_KEY,
+            app.state.config.OPENAI_API_BASE_URL,
+            app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE,
+        )
+
+        return {
+            "status": True,
+            "embedding_engine": app.state.config.RAG_EMBEDDING_ENGINE,
+            "embedding_model": app.state.config.RAG_EMBEDDING_MODEL,
+            "openai_config": {
+                "url": app.state.config.OPENAI_API_BASE_URL,
+                "key": app.state.config.OPENAI_API_KEY,
+                "batch_size": app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE,
+            },
+        }
+    except Exception as e:
+        log.exception(f"Problem updating embedding model: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=ERROR_MESSAGES.DEFAULT(e),
+        )
+
+
+class RerankingModelUpdateForm(BaseModel):
+    reranking_model: str
+
+
+@app.post("/reranking/update")
+async def update_reranking_config(
+    form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
+):
+    log.info(
+        f"Updating reranking model: {app.state.config.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
+    )
+    try:
+        app.state.config.RAG_RERANKING_MODEL = form_data.reranking_model
+
+        update_reranking_model(app.state.config.RAG_RERANKING_MODEL, True)
+
+        return {
+            "status": True,
+            "reranking_model": app.state.config.RAG_RERANKING_MODEL,
+        }
+    except Exception as e:
+        log.exception(f"Problem updating reranking model: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=ERROR_MESSAGES.DEFAULT(e),
+        )
+
+
+@app.get("/config")
+async def get_rag_config(user=Depends(get_admin_user)):
+    return {
+        "status": True,
+        "pdf_extract_images": app.state.config.PDF_EXTRACT_IMAGES,
+        "content_extraction": {
+            "engine": app.state.config.CONTENT_EXTRACTION_ENGINE,
+            "tika_server_url": app.state.config.TIKA_SERVER_URL,
+        },
+        "chunk": {
+            "chunk_size": app.state.config.CHUNK_SIZE,
+ "chunk_overlap": app.state.config.CHUNK_OVERLAP, + }, + "youtube": { + "language": app.state.config.YOUTUBE_LOADER_LANGUAGE, + "translation": app.state.YOUTUBE_LOADER_TRANSLATION, + }, + "web": { + "ssl_verification": app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION, + "search": { + "enabled": app.state.config.ENABLE_RAG_WEB_SEARCH, + "engine": app.state.config.RAG_WEB_SEARCH_ENGINE, + "searxng_query_url": app.state.config.SEARXNG_QUERY_URL, + "google_pse_api_key": app.state.config.GOOGLE_PSE_API_KEY, + "google_pse_engine_id": app.state.config.GOOGLE_PSE_ENGINE_ID, + "brave_search_api_key": app.state.config.BRAVE_SEARCH_API_KEY, + "serpstack_api_key": app.state.config.SERPSTACK_API_KEY, + "serpstack_https": app.state.config.SERPSTACK_HTTPS, + "serper_api_key": app.state.config.SERPER_API_KEY, + "serply_api_key": app.state.config.SERPLY_API_KEY, + "tavily_api_key": app.state.config.TAVILY_API_KEY, + "result_count": app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + "concurrent_requests": app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS, + }, + }, + } + + +class ContentExtractionConfig(BaseModel): + engine: str = "" + tika_server_url: Optional[str] = None + + +class ChunkParamUpdateForm(BaseModel): + chunk_size: int + chunk_overlap: int + + +class YoutubeLoaderConfig(BaseModel): + language: List[str] + translation: Optional[str] = None + + +class WebSearchConfig(BaseModel): + enabled: bool + engine: Optional[str] = None + searxng_query_url: Optional[str] = None + google_pse_api_key: Optional[str] = None + google_pse_engine_id: Optional[str] = None + brave_search_api_key: Optional[str] = None + serpstack_api_key: Optional[str] = None + serpstack_https: Optional[bool] = None + serper_api_key: Optional[str] = None + serply_api_key: Optional[str] = None + tavily_api_key: Optional[str] = None + result_count: Optional[int] = None + concurrent_requests: Optional[int] = None + + +class WebConfig(BaseModel): + search: WebSearchConfig + web_loader_ssl_verification: Optional[bool] = None + + +class ConfigUpdateForm(BaseModel): + pdf_extract_images: Optional[bool] = None + content_extraction: Optional[ContentExtractionConfig] = None + chunk: Optional[ChunkParamUpdateForm] = None + youtube: Optional[YoutubeLoaderConfig] = None + web: Optional[WebConfig] = None + + +@app.post("/config/update") +async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)): + app.state.config.PDF_EXTRACT_IMAGES = ( + form_data.pdf_extract_images + if form_data.pdf_extract_images is not None + else app.state.config.PDF_EXTRACT_IMAGES + ) + + if form_data.content_extraction is not None: + log.info(f"Updating text settings: {form_data.content_extraction}") + app.state.config.CONTENT_EXTRACTION_ENGINE = form_data.content_extraction.engine + app.state.config.TIKA_SERVER_URL = form_data.content_extraction.tika_server_url + + if form_data.chunk is not None: + app.state.config.CHUNK_SIZE = form_data.chunk.chunk_size + app.state.config.CHUNK_OVERLAP = form_data.chunk.chunk_overlap + + if form_data.youtube is not None: + app.state.config.YOUTUBE_LOADER_LANGUAGE = form_data.youtube.language + app.state.YOUTUBE_LOADER_TRANSLATION = form_data.youtube.translation + + if form_data.web is not None: + app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = ( + form_data.web.web_loader_ssl_verification + ) + + app.state.config.ENABLE_RAG_WEB_SEARCH = form_data.web.search.enabled + app.state.config.RAG_WEB_SEARCH_ENGINE = form_data.web.search.engine + app.state.config.SEARXNG_QUERY_URL = 
form_data.web.search.searxng_query_url + app.state.config.GOOGLE_PSE_API_KEY = form_data.web.search.google_pse_api_key + app.state.config.GOOGLE_PSE_ENGINE_ID = ( + form_data.web.search.google_pse_engine_id + ) + app.state.config.BRAVE_SEARCH_API_KEY = ( + form_data.web.search.brave_search_api_key + ) + app.state.config.SERPSTACK_API_KEY = form_data.web.search.serpstack_api_key + app.state.config.SERPSTACK_HTTPS = form_data.web.search.serpstack_https + app.state.config.SERPER_API_KEY = form_data.web.search.serper_api_key + app.state.config.SERPLY_API_KEY = form_data.web.search.serply_api_key + app.state.config.TAVILY_API_KEY = form_data.web.search.tavily_api_key + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = form_data.web.search.result_count + app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = ( + form_data.web.search.concurrent_requests + ) + + return { + "status": True, + "pdf_extract_images": app.state.config.PDF_EXTRACT_IMAGES, + "content_extraction": { + "engine": app.state.config.CONTENT_EXTRACTION_ENGINE, + "tika_server_url": app.state.config.TIKA_SERVER_URL, + }, + "chunk": { + "chunk_size": app.state.config.CHUNK_SIZE, + "chunk_overlap": app.state.config.CHUNK_OVERLAP, + }, + "youtube": { + "language": app.state.config.YOUTUBE_LOADER_LANGUAGE, + "translation": app.state.YOUTUBE_LOADER_TRANSLATION, + }, + "web": { + "ssl_verification": app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION, + "search": { + "enabled": app.state.config.ENABLE_RAG_WEB_SEARCH, + "engine": app.state.config.RAG_WEB_SEARCH_ENGINE, + "searxng_query_url": app.state.config.SEARXNG_QUERY_URL, + "google_pse_api_key": app.state.config.GOOGLE_PSE_API_KEY, + "google_pse_engine_id": app.state.config.GOOGLE_PSE_ENGINE_ID, + "brave_search_api_key": app.state.config.BRAVE_SEARCH_API_KEY, + "serpstack_api_key": app.state.config.SERPSTACK_API_KEY, + "serpstack_https": app.state.config.SERPSTACK_HTTPS, + "serper_api_key": app.state.config.SERPER_API_KEY, + "serply_api_key": app.state.config.SERPLY_API_KEY, + "tavily_api_key": app.state.config.TAVILY_API_KEY, + "result_count": app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + "concurrent_requests": app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS, + }, + }, + } + + +@app.get("/template") +async def get_rag_template(user=Depends(get_verified_user)): + return { + "status": True, + "template": app.state.config.RAG_TEMPLATE, + } + + +@app.get("/query/settings") +async def get_query_settings(user=Depends(get_admin_user)): + return { + "status": True, + "template": app.state.config.RAG_TEMPLATE, + "k": app.state.config.TOP_K, + "r": app.state.config.RELEVANCE_THRESHOLD, + "hybrid": app.state.config.ENABLE_RAG_HYBRID_SEARCH, + } + + +class QuerySettingsForm(BaseModel): + k: Optional[int] = None + r: Optional[float] = None + template: Optional[str] = None + hybrid: Optional[bool] = None + + +@app.post("/query/settings/update") +async def update_query_settings( + form_data: QuerySettingsForm, user=Depends(get_admin_user) +): + app.state.config.RAG_TEMPLATE = ( + form_data.template if form_data.template else RAG_TEMPLATE + ) + app.state.config.TOP_K = form_data.k if form_data.k else 4 + app.state.config.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0 + app.state.config.ENABLE_RAG_HYBRID_SEARCH = ( + form_data.hybrid if form_data.hybrid else False + ) + return { + "status": True, + "template": app.state.config.RAG_TEMPLATE, + "k": app.state.config.TOP_K, + "r": app.state.config.RELEVANCE_THRESHOLD, + "hybrid": app.state.config.ENABLE_RAG_HYBRID_SEARCH, + } + + 
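# Illustrative sketch only: an admin-side call to the /query/settings/update route
# defined above. The host, the mount prefix for this sub-app, and the bearer token
# are assumptions made for the example; the field names (k, r, template, hybrid)
# mirror QuerySettingsForm.

import requests


def example_update_query_settings():
    resp = requests.post(
        "http://localhost:8080/rag/api/v1/query/settings/update",  # assumed mount point
        headers={"Authorization": "Bearer <admin-api-key>"},  # placeholder credential
        json={"k": 6, "r": 0.2, "hybrid": True},
    )
    resp.raise_for_status()
    # The handler echoes back the stored template, k, r and hybrid values.
    return resp.json()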
+class QueryDocForm(BaseModel): + collection_name: str + query: str + k: Optional[int] = None + r: Optional[float] = None + hybrid: Optional[bool] = None + + +@app.post("/query/doc") +def query_doc_handler( + form_data: QueryDocForm, + user=Depends(get_verified_user), +): + try: + if app.state.config.ENABLE_RAG_HYBRID_SEARCH: + return query_doc_with_hybrid_search( + collection_name=form_data.collection_name, + query=form_data.query, + embedding_function=app.state.EMBEDDING_FUNCTION, + k=form_data.k if form_data.k else app.state.config.TOP_K, + reranking_function=app.state.sentence_transformer_rf, + r=( + form_data.r if form_data.r else app.state.config.RELEVANCE_THRESHOLD + ), + ) + else: + return query_doc( + collection_name=form_data.collection_name, + query=form_data.query, + embedding_function=app.state.EMBEDDING_FUNCTION, + k=form_data.k if form_data.k else app.state.config.TOP_K, + ) + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +class QueryCollectionsForm(BaseModel): + collection_names: List[str] + query: str + k: Optional[int] = None + r: Optional[float] = None + hybrid: Optional[bool] = None + + +@app.post("/query/collection") +def query_collection_handler( + form_data: QueryCollectionsForm, + user=Depends(get_verified_user), +): + try: + if app.state.config.ENABLE_RAG_HYBRID_SEARCH: + return query_collection_with_hybrid_search( + collection_names=form_data.collection_names, + query=form_data.query, + embedding_function=app.state.EMBEDDING_FUNCTION, + k=form_data.k if form_data.k else app.state.config.TOP_K, + reranking_function=app.state.sentence_transformer_rf, + r=( + form_data.r if form_data.r else app.state.config.RELEVANCE_THRESHOLD + ), + ) + else: + return query_collection( + collection_names=form_data.collection_names, + query=form_data.query, + embedding_function=app.state.EMBEDDING_FUNCTION, + k=form_data.k if form_data.k else app.state.config.TOP_K, + ) + + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +@app.post("/youtube") +def store_youtube_video(form_data: UrlForm, user=Depends(get_verified_user)): + try: + loader = YoutubeLoader.from_youtube_url( + form_data.url, + add_video_info=True, + language=app.state.config.YOUTUBE_LOADER_LANGUAGE, + translation=app.state.YOUTUBE_LOADER_TRANSLATION, + ) + data = loader.load() + + collection_name = form_data.collection_name + if collection_name == "": + collection_name = calculate_sha256_string(form_data.url)[:63] + + store_data_in_vector_db(data, collection_name, overwrite=True) + return { + "status": True, + "collection_name": collection_name, + "filename": form_data.url, + } + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +@app.post("/web") +def store_web(form_data: UrlForm, user=Depends(get_verified_user)): + # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm" + try: + loader = get_web_loader( + form_data.url, + verify_ssl=app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION, + ) + data = loader.load() + + collection_name = form_data.collection_name + if collection_name == "": + collection_name = calculate_sha256_string(form_data.url)[:63] + + store_data_in_vector_db(data, collection_name, overwrite=True) + return { + "status": True, + "collection_name": collection_name, + "filename": 
form_data.url, + } + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +def get_web_loader(url: Union[str, Sequence[str]], verify_ssl: bool = True): + # Check if the URL is valid + if not validate_url(url): + raise ValueError(ERROR_MESSAGES.INVALID_URL) + return SafeWebBaseLoader( + url, + verify_ssl=verify_ssl, + requests_per_second=RAG_WEB_SEARCH_CONCURRENT_REQUESTS, + continue_on_failure=True, + ) + + +def validate_url(url: Union[str, Sequence[str]]): + if isinstance(url, str): + if isinstance(validators.url(url), validators.ValidationError): + raise ValueError(ERROR_MESSAGES.INVALID_URL) + if not ENABLE_RAG_LOCAL_WEB_FETCH: + # Local web fetch is disabled, filter out any URLs that resolve to private IP addresses + parsed_url = urllib.parse.urlparse(url) + # Get IPv4 and IPv6 addresses + ipv4_addresses, ipv6_addresses = resolve_hostname(parsed_url.hostname) + # Check if any of the resolved addresses are private + # This is technically still vulnerable to DNS rebinding attacks, as we don't control WebBaseLoader + for ip in ipv4_addresses: + if validators.ipv4(ip, private=True): + raise ValueError(ERROR_MESSAGES.INVALID_URL) + for ip in ipv6_addresses: + if validators.ipv6(ip, private=True): + raise ValueError(ERROR_MESSAGES.INVALID_URL) + return True + elif isinstance(url, Sequence): + return all(validate_url(u) for u in url) + else: + return False + + +def resolve_hostname(hostname): + # Get address information + addr_info = socket.getaddrinfo(hostname, None) + + # Extract IP addresses from address information + ipv4_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET] + ipv6_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET6] + + return ipv4_addresses, ipv6_addresses + + +def search_web(engine: str, query: str) -> list[SearchResult]: + """Search the web using a search engine and return the results as a list of SearchResult objects. 
+ Will look for a search engine API key in environment variables in the following order: + - SEARXNG_QUERY_URL + - GOOGLE_PSE_API_KEY + GOOGLE_PSE_ENGINE_ID + - BRAVE_SEARCH_API_KEY + - SERPSTACK_API_KEY + - SERPER_API_KEY + - SERPLY_API_KEY + - TAVILY_API_KEY + Args: + query (str): The query to search for + """ + + # TODO: add playwright to search the web + if engine == "searxng": + if app.state.config.SEARXNG_QUERY_URL: + return search_searxng( + app.state.config.SEARXNG_QUERY_URL, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + else: + raise Exception("No SEARXNG_QUERY_URL found in environment variables") + elif engine == "google_pse": + if ( + app.state.config.GOOGLE_PSE_API_KEY + and app.state.config.GOOGLE_PSE_ENGINE_ID + ): + return search_google_pse( + app.state.config.GOOGLE_PSE_API_KEY, + app.state.config.GOOGLE_PSE_ENGINE_ID, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + else: + raise Exception( + "No GOOGLE_PSE_API_KEY or GOOGLE_PSE_ENGINE_ID found in environment variables" + ) + elif engine == "brave": + if app.state.config.BRAVE_SEARCH_API_KEY: + return search_brave( + app.state.config.BRAVE_SEARCH_API_KEY, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + else: + raise Exception("No BRAVE_SEARCH_API_KEY found in environment variables") + elif engine == "serpstack": + if app.state.config.SERPSTACK_API_KEY: + return search_serpstack( + app.state.config.SERPSTACK_API_KEY, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + https_enabled=app.state.config.SERPSTACK_HTTPS, + ) + else: + raise Exception("No SERPSTACK_API_KEY found in environment variables") + elif engine == "serper": + if app.state.config.SERPER_API_KEY: + return search_serper( + app.state.config.SERPER_API_KEY, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + else: + raise Exception("No SERPER_API_KEY found in environment variables") + elif engine == "serply": + if app.state.config.SERPLY_API_KEY: + return search_serply( + app.state.config.SERPLY_API_KEY, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + else: + raise Exception("No SERPLY_API_KEY found in environment variables") + elif engine == "duckduckgo": + return search_duckduckgo( + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST, + ) + elif engine == "tavily": + if app.state.config.TAVILY_API_KEY: + return search_tavily( + app.state.config.TAVILY_API_KEY, + query, + app.state.config.RAG_WEB_SEARCH_RESULT_COUNT, + ) + else: + raise Exception("No TAVILY_API_KEY found in environment variables") + elif engine == "jina": + return search_jina(query, app.state.config.RAG_WEB_SEARCH_RESULT_COUNT) + else: + raise Exception("No search engine API key found in environment variables") + + +@app.post("/web/search") +def store_web_search(form_data: SearchForm, user=Depends(get_verified_user)): + try: + logging.info( + f"trying to web search with {app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query}" + ) + web_results = search_web( + app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query + ) + except Exception as e: + log.exception(e) + + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + 
detail=ERROR_MESSAGES.WEB_SEARCH_ERROR(e), + ) + + try: + urls = [result.link for result in web_results] + loader = get_web_loader(urls) + data = loader.load() + + collection_name = form_data.collection_name + if collection_name == "": + collection_name = calculate_sha256_string(form_data.query)[:63] + + store_data_in_vector_db(data, collection_name, overwrite=True) + return { + "status": True, + "collection_name": collection_name, + "filenames": urls, + } + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +def store_data_in_vector_db( + data, collection_name, metadata: Optional[dict] = None, overwrite: bool = False +) -> bool: + + text_splitter = RecursiveCharacterTextSplitter( + chunk_size=app.state.config.CHUNK_SIZE, + chunk_overlap=app.state.config.CHUNK_OVERLAP, + add_start_index=True, + ) + + docs = text_splitter.split_documents(data) + + if len(docs) > 0: + log.info(f"store_data_in_vector_db {docs}") + return store_docs_in_vector_db(docs, collection_name, metadata, overwrite), None + else: + raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT) + + +def store_text_in_vector_db( + text, metadata, collection_name, overwrite: bool = False +) -> bool: + text_splitter = RecursiveCharacterTextSplitter( + chunk_size=app.state.config.CHUNK_SIZE, + chunk_overlap=app.state.config.CHUNK_OVERLAP, + add_start_index=True, + ) + docs = text_splitter.create_documents([text], metadatas=[metadata]) + return store_docs_in_vector_db(docs, collection_name, overwrite=overwrite) + + +def store_docs_in_vector_db( + docs, collection_name, metadata: Optional[dict] = None, overwrite: bool = False +) -> bool: + log.info(f"store_docs_in_vector_db {docs} {collection_name}") + + texts = [doc.page_content for doc in docs] + metadatas = [{**doc.metadata, **(metadata if metadata else {})} for doc in docs] + + # ChromaDB does not like datetime formats + # for meta-data so convert them to string. 
+ for metadata in metadatas: + for key, value in metadata.items(): + if isinstance(value, datetime): + metadata[key] = str(value) + + try: + if overwrite: + for collection in CHROMA_CLIENT.list_collections(): + if collection_name == collection.name: + log.info(f"deleting existing collection {collection_name}") + CHROMA_CLIENT.delete_collection(name=collection_name) + + collection = CHROMA_CLIENT.create_collection(name=collection_name) + + embedding_func = get_embedding_function( + app.state.config.RAG_EMBEDDING_ENGINE, + app.state.config.RAG_EMBEDDING_MODEL, + app.state.sentence_transformer_ef, + app.state.config.OPENAI_API_KEY, + app.state.config.OPENAI_API_BASE_URL, + app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE, + ) + + embedding_texts = list(map(lambda x: x.replace("\n", " "), texts)) + embeddings = embedding_func(embedding_texts) + + for batch in create_batches( + api=CHROMA_CLIENT, + ids=[str(uuid.uuid4()) for _ in texts], + metadatas=metadatas, + embeddings=embeddings, + documents=texts, + ): + collection.add(*batch) + + return True + except Exception as e: + if e.__class__.__name__ == "UniqueConstraintError": + return True + + log.exception(e) + + return False + + +class TikaLoader: + def __init__(self, file_path, mime_type=None): + self.file_path = file_path + self.mime_type = mime_type + + def load(self) -> List[Document]: + with open(self.file_path, "rb") as f: + data = f.read() + + if self.mime_type is not None: + headers = {"Content-Type": self.mime_type} + else: + headers = {} + + endpoint = app.state.config.TIKA_SERVER_URL + if not endpoint.endswith("/"): + endpoint += "/" + endpoint += "tika/text" + + r = requests.put(endpoint, data=data, headers=headers) + + if r.ok: + raw_metadata = r.json() + text = raw_metadata.get("X-TIKA:content", "") + + if "Content-Type" in raw_metadata: + headers["Content-Type"] = raw_metadata["Content-Type"] + + log.info("Tika extracted text: %s", text) + + return [Document(page_content=text, metadata=headers)] + else: + raise Exception(f"Error calling Tika: {r.reason}") + + +def get_loader(filename: str, file_content_type: str, file_path: str): + file_ext = filename.split(".")[-1].lower() + known_type = True + + known_source_ext = [ + "go", + "py", + "java", + "sh", + "bat", + "ps1", + "cmd", + "js", + "ts", + "css", + "cpp", + "hpp", + "h", + "c", + "cs", + "sql", + "log", + "ini", + "pl", + "pm", + "r", + "dart", + "dockerfile", + "env", + "php", + "hs", + "hsc", + "lua", + "nginxconf", + "conf", + "m", + "mm", + "plsql", + "perl", + "rb", + "rs", + "db2", + "scala", + "bash", + "swift", + "vue", + "svelte", + "msg", + "ex", + "exs", + "erl", + "tsx", + "jsx", + "hs", + "lhs", + ] + + if ( + app.state.config.CONTENT_EXTRACTION_ENGINE == "tika" + and app.state.config.TIKA_SERVER_URL + ): + if file_ext in known_source_ext or ( + file_content_type and file_content_type.find("text/") >= 0 + ): + loader = TextLoader(file_path, autodetect_encoding=True) + else: + loader = TikaLoader(file_path, file_content_type) + else: + if file_ext == "pdf": + loader = PyPDFLoader( + file_path, extract_images=app.state.config.PDF_EXTRACT_IMAGES + ) + elif file_ext == "csv": + loader = CSVLoader(file_path) + elif file_ext == "rst": + loader = UnstructuredRSTLoader(file_path, mode="elements") + elif file_ext == "xml": + loader = UnstructuredXMLLoader(file_path) + elif file_ext in ["htm", "html"]: + loader = BSHTMLLoader(file_path, open_encoding="unicode_escape") + elif file_ext == "md": + loader = UnstructuredMarkdownLoader(file_path) + elif file_content_type == 
"application/epub+zip": + loader = UnstructuredEPubLoader(file_path) + elif ( + file_content_type + == "application/vnd.openxmlformats-officedocument.wordprocessingml.document" + or file_ext in ["doc", "docx"] + ): + loader = Docx2txtLoader(file_path) + elif file_content_type in [ + "application/vnd.ms-excel", + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + ] or file_ext in ["xls", "xlsx"]: + loader = UnstructuredExcelLoader(file_path) + elif file_content_type in [ + "application/vnd.ms-powerpoint", + "application/vnd.openxmlformats-officedocument.presentationml.presentation", + ] or file_ext in ["ppt", "pptx"]: + loader = UnstructuredPowerPointLoader(file_path) + elif file_ext == "msg": + loader = OutlookMessageLoader(file_path) + elif file_ext in known_source_ext or ( + file_content_type and file_content_type.find("text/") >= 0 + ): + loader = TextLoader(file_path, autodetect_encoding=True) + else: + loader = TextLoader(file_path, autodetect_encoding=True) + known_type = False + + return loader, known_type + + +@app.post("/doc") +def store_doc( + collection_name: Optional[str] = Form(None), + file: UploadFile = File(...), + user=Depends(get_verified_user), +): + # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm" + + log.info(f"file.content_type: {file.content_type}") + try: + unsanitized_filename = file.filename + filename = os.path.basename(unsanitized_filename) + + file_path = f"{UPLOAD_DIR}/{filename}" + + contents = file.file.read() + with open(file_path, "wb") as f: + f.write(contents) + f.close() + + f = open(file_path, "rb") + if collection_name == None: + collection_name = calculate_sha256(f)[:63] + f.close() + + loader, known_type = get_loader(filename, file.content_type, file_path) + data = loader.load() + + try: + result = store_data_in_vector_db(data, collection_name) + + if result: + return { + "status": True, + "collection_name": collection_name, + "filename": filename, + "known_type": known_type, + } + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=e, + ) + except Exception as e: + log.exception(e) + if "No pandoc was found" in str(e): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED, + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +class ProcessDocForm(BaseModel): + file_id: str + collection_name: Optional[str] = None + + +@app.post("/process/doc") +def process_doc( + form_data: ProcessDocForm, + user=Depends(get_verified_user), +): + try: + file = Files.get_file_by_id(form_data.file_id) + file_path = file.meta.get("path", f"{UPLOAD_DIR}/{file.filename}") + + f = open(file_path, "rb") + + collection_name = form_data.collection_name + if collection_name == None: + collection_name = calculate_sha256(f)[:63] + f.close() + + loader, known_type = get_loader( + file.filename, file.meta.get("content_type"), file_path + ) + data = loader.load() + + try: + result = store_data_in_vector_db( + data, + collection_name, + { + "file_id": form_data.file_id, + "name": file.meta.get("name", file.filename), + }, + ) + + if result: + return { + "status": True, + "collection_name": collection_name, + "known_type": known_type, + "filename": file.meta.get("name", file.filename), + } + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=e, + ) + except Exception as e: + log.exception(e) + if "No pandoc was found" 
in str(e): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED, + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +class TextRAGForm(BaseModel): + name: str + content: str + collection_name: Optional[str] = None + + +@app.post("/text") +def store_text( + form_data: TextRAGForm, + user=Depends(get_verified_user), +): + + collection_name = form_data.collection_name + if collection_name == None: + collection_name = calculate_sha256_string(form_data.content) + + result = store_text_in_vector_db( + form_data.content, + metadata={"name": form_data.name, "created_by": user.id}, + collection_name=collection_name, + ) + + if result: + return {"status": True, "collection_name": collection_name} + else: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=ERROR_MESSAGES.DEFAULT(), + ) + + +@app.get("/scan") +def scan_docs_dir(user=Depends(get_admin_user)): + for path in Path(DOCS_DIR).rglob("./**/*"): + try: + if path.is_file() and not path.name.startswith("."): + tags = extract_folders_after_data_docs(path) + filename = path.name + file_content_type = mimetypes.guess_type(path) + + f = open(path, "rb") + collection_name = calculate_sha256(f)[:63] + f.close() + + loader, known_type = get_loader( + filename, file_content_type[0], str(path) + ) + data = loader.load() + + try: + result = store_data_in_vector_db(data, collection_name) + + if result: + sanitized_filename = sanitize_filename(filename) + doc = Documents.get_doc_by_name(sanitized_filename) + + if doc == None: + doc = Documents.insert_new_doc( + user.id, + DocumentForm( + **{ + "name": sanitized_filename, + "title": filename, + "collection_name": collection_name, + "filename": filename, + "content": ( + json.dumps( + { + "tags": list( + map( + lambda name: {"name": name}, + tags, + ) + ) + } + ) + if len(tags) + else "{}" + ), + } + ), + ) + except Exception as e: + log.exception(e) + pass + + except Exception as e: + log.exception(e) + + return True + + +@app.get("/reset/db") +def reset_vector_db(user=Depends(get_admin_user)): + CHROMA_CLIENT.reset() + + +@app.get("/reset/uploads") +def reset_upload_dir(user=Depends(get_admin_user)) -> bool: + folder = f"{UPLOAD_DIR}" + try: + # Check if the directory exists + if os.path.exists(folder): + # Iterate over all the files and directories in the specified directory + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) # Remove the file or link + elif os.path.isdir(file_path): + shutil.rmtree(file_path) # Remove the directory + except Exception as e: + print(f"Failed to delete {file_path}. Reason: {e}") + else: + print(f"The directory {folder} does not exist") + except Exception as e: + print(f"Failed to process the directory {folder}. Reason: {e}") + + return True + + +@app.get("/reset") +def reset(user=Depends(get_admin_user)) -> bool: + folder = f"{UPLOAD_DIR}" + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except Exception as e: + log.error("Failed to delete %s. 
Reason: %s" % (file_path, e)) + + try: + CHROMA_CLIENT.reset() + except Exception as e: + log.exception(e) + + return True + + +class SafeWebBaseLoader(WebBaseLoader): + """WebBaseLoader with enhanced error handling for URLs.""" + + def lazy_load(self) -> Iterator[Document]: + """Lazy load text from the url(s) in web_path with error handling.""" + for path in self.web_paths: + try: + soup = self._scrape(path, bs_kwargs=self.bs_kwargs) + text = soup.get_text(**self.bs_get_text_kwargs) + + # Build metadata + metadata = {"source": path} + if title := soup.find("title"): + metadata["title"] = title.get_text() + if description := soup.find("meta", attrs={"name": "description"}): + metadata["description"] = description.get( + "content", "No description found." + ) + if html := soup.find("html"): + metadata["language"] = html.get("lang", "No language found.") + + yield Document(page_content=text, metadata=metadata) + except Exception as e: + # Log the error and continue with the next URL + log.error(f"Error loading {path}: {e}") + + +if ENV == "dev": + + @app.get("/ef") + async def get_embeddings(): + return {"result": app.state.EMBEDDING_FUNCTION("hello world")} + + @app.get("/ef/{text}") + async def get_embeddings_text(text: str): + return {"result": app.state.EMBEDDING_FUNCTION(text)} diff --git a/backend/apps/rag/search/brave.py b/backend/apps/rag/search/brave.py new file mode 100644 index 0000000000000000000000000000000000000000..76ad1fb4731d2e572027cec32f65267a3f9f3675 --- /dev/null +++ b/backend/apps/rag/search/brave.py @@ -0,0 +1,42 @@ +import logging +from typing import List, Optional +import requests + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_brave( + api_key: str, query: str, count: int, filter_list: Optional[List[str]] = None +) -> list[SearchResult]: + """Search using Brave's Search API and return the results as a list of SearchResult objects. + + Args: + api_key (str): A Brave Search API key + query (str): The query to search for + """ + url = "https://api.search.brave.com/res/v1/web/search" + headers = { + "Accept": "application/json", + "Accept-Encoding": "gzip", + "X-Subscription-Token": api_key, + } + params = {"q": query, "count": count} + + response = requests.get(url, headers=headers, params=params) + response.raise_for_status() + + json_response = response.json() + results = json_response.get("web", {}).get("results", []) + if filter_list: + results = get_filtered_results(results, filter_list) + + return [ + SearchResult( + link=result["url"], title=result.get("title"), snippet=result.get("snippet") + ) + for result in results[:count] + ] diff --git a/backend/apps/rag/search/duckduckgo.py b/backend/apps/rag/search/duckduckgo.py new file mode 100644 index 0000000000000000000000000000000000000000..f0cc2a71035d5279d9c184f2c20787d41b16de52 --- /dev/null +++ b/backend/apps/rag/search/duckduckgo.py @@ -0,0 +1,49 @@ +import logging +from typing import List, Optional +from apps.rag.search.main import SearchResult, get_filtered_results +from duckduckgo_search import DDGS +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_duckduckgo( + query: str, count: int, filter_list: Optional[List[str]] = None +) -> list[SearchResult]: + """ + Search using DuckDuckGo's Search API and return the results as a list of SearchResult objects. 
+ Args: + query (str): The query to search for + count (int): The number of results to return + + Returns: + List[SearchResult]: A list of search results + """ + # Use the DDGS context manager to create a DDGS object + with DDGS() as ddgs: + # Use the ddgs.text() method to perform the search + ddgs_gen = ddgs.text( + query, safesearch="moderate", max_results=count, backend="api" + ) + # Check if there are search results + if ddgs_gen: + # Convert the search results into a list + search_results = [r for r in ddgs_gen] + + # Create an empty list to store the SearchResult objects + results = [] + # Iterate over each search result + for result in search_results: + # Create a SearchResult object and append it to the results list + results.append( + SearchResult( + link=result["href"], + title=result.get("title"), + snippet=result.get("body"), + ) + ) + if filter_list: + results = get_filtered_results(results, filter_list) + # Return the list of search results + return results diff --git a/backend/apps/rag/search/google_pse.py b/backend/apps/rag/search/google_pse.py new file mode 100644 index 0000000000000000000000000000000000000000..0c78512e74ef82d391dad426c42e55e3a25a148c --- /dev/null +++ b/backend/apps/rag/search/google_pse.py @@ -0,0 +1,51 @@ +import json +import logging +from typing import List, Optional +import requests + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_google_pse( + api_key: str, + search_engine_id: str, + query: str, + count: int, + filter_list: Optional[List[str]] = None, +) -> list[SearchResult]: + """Search using Google's Programmable Search Engine API and return the results as a list of SearchResult objects. + + Args: + api_key (str): A Programmable Search Engine API key + search_engine_id (str): A Programmable Search Engine ID + query (str): The query to search for + """ + url = "https://www.googleapis.com/customsearch/v1" + + headers = {"Content-Type": "application/json"} + params = { + "cx": search_engine_id, + "q": query, + "key": api_key, + "num": count, + } + + response = requests.request("GET", url, headers=headers, params=params) + response.raise_for_status() + + json_response = response.json() + results = json_response.get("items", []) + if filter_list: + results = get_filtered_results(results, filter_list) + return [ + SearchResult( + link=result["link"], + title=result.get("title"), + snippet=result.get("snippet"), + ) + for result in results + ] diff --git a/backend/apps/rag/search/jina_search.py b/backend/apps/rag/search/jina_search.py new file mode 100644 index 0000000000000000000000000000000000000000..65f9ad68fe7984fa0568ac6b32367a1cf8632c26 --- /dev/null +++ b/backend/apps/rag/search/jina_search.py @@ -0,0 +1,41 @@ +import logging +import requests +from yarl import URL + +from apps.rag.search.main import SearchResult +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_jina(query: str, count: int) -> list[SearchResult]: + """ + Search using Jina's Search API and return the results as a list of SearchResult objects. 
+ Args: + query (str): The query to search for + count (int): The number of results to return + + Returns: + List[SearchResult]: A list of search results + """ + jina_search_endpoint = "https://s.jina.ai/" + headers = { + "Accept": "application/json", + } + url = str(URL(jina_search_endpoint + query)) + response = requests.get(url, headers=headers) + response.raise_for_status() + data = response.json() + + results = [] + for result in data["data"][:count]: + results.append( + SearchResult( + link=result["url"], + title=result.get("title"), + snippet=result.get("content"), + ) + ) + + return results diff --git a/backend/apps/rag/search/main.py b/backend/apps/rag/search/main.py new file mode 100644 index 0000000000000000000000000000000000000000..49056f1fd1ec74f2e5cea0aade8e9d170f7b1c58 --- /dev/null +++ b/backend/apps/rag/search/main.py @@ -0,0 +1,20 @@ +from typing import Optional +from urllib.parse import urlparse +from pydantic import BaseModel + + +def get_filtered_results(results, filter_list): + if not filter_list: + return results + filtered_results = [] + for result in results: + domain = urlparse(result["url"]).netloc + if any(domain.endswith(filtered_domain) for filtered_domain in filter_list): + filtered_results.append(result) + return filtered_results + + +class SearchResult(BaseModel): + link: str + title: Optional[str] + snippet: Optional[str] diff --git a/backend/apps/rag/search/searxng.py b/backend/apps/rag/search/searxng.py new file mode 100644 index 0000000000000000000000000000000000000000..6e545e994e8659c63a82cf0fce48b4724e042d84 --- /dev/null +++ b/backend/apps/rag/search/searxng.py @@ -0,0 +1,92 @@ +import logging +import requests + +from typing import List, Optional + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_searxng( + query_url: str, + query: str, + count: int, + filter_list: Optional[List[str]] = None, + **kwargs, +) -> List[SearchResult]: + """ + Search a SearXNG instance for a given query and return the results as a list of SearchResult objects. + + The function allows passing additional parameters such as language or time_range to tailor the search result. + + Args: + query_url (str): The base URL of the SearXNG server. + query (str): The search term or question to find in the SearXNG database. + count (int): The maximum number of results to retrieve from the search. + + Keyword Args: + language (str): Language filter for the search results; e.g., "en-US". Defaults to an empty string. + safesearch (int): Safe search filter for safer web results; 0 = off, 1 = moderate, 2 = strict. Defaults to 1 (moderate). + time_range (str): Time range for filtering results by date; e.g., "2023-04-05..today" or "all-time". Defaults to ''. + categories: (Optional[List[str]]): Specific categories within which the search should be performed, defaulting to an empty string if not provided. + + Returns: + List[SearchResult]: A list of SearchResults sorted by relevance score in descending order. + + Raise: + requests.exceptions.RequestException: If a request error occurs during the search process. + """ + + # Default values for optional parameters are provided as empty strings or None when not specified. 
+ language = kwargs.get("language", "en-US") + safesearch = kwargs.get("safesearch", "1") + time_range = kwargs.get("time_range", "") + categories = "".join(kwargs.get("categories", [])) + + params = { + "q": query, + "format": "json", + "pageno": 1, + "safesearch": safesearch, + "language": language, + "time_range": time_range, + "categories": categories, + "theme": "simple", + "image_proxy": 0, + } + + # Legacy query format + if "<query>" in query_url: + # Strip all query parameters from the URL + query_url = query_url.split("?")[0] + + log.debug(f"searching {query_url}") + + response = requests.get( + query_url, + headers={ + "User-Agent": "Open WebUI (https://github.com/open-webui/open-webui) RAG Bot", + "Accept": "text/html", + "Accept-Encoding": "gzip, deflate", + "Accept-Language": "en-US,en;q=0.5", + "Connection": "keep-alive", + }, + params=params, + ) + + response.raise_for_status() # Raise an exception for HTTP errors. + + json_response = response.json() + results = json_response.get("results", []) + sorted_results = sorted(results, key=lambda x: x.get("score", 0), reverse=True) + if filter_list: + sorted_results = get_filtered_results(sorted_results, filter_list) + return [ + SearchResult( + link=result["url"], title=result.get("title"), snippet=result.get("content") + ) + for result in sorted_results[:count] + ] diff --git a/backend/apps/rag/search/serper.py b/backend/apps/rag/search/serper.py new file mode 100644 index 0000000000000000000000000000000000000000..b278a4df15a1322a03dad373518043cf2c003eab --- /dev/null +++ b/backend/apps/rag/search/serper.py @@ -0,0 +1,43 @@ +import json +import logging +from typing import List, Optional +import requests + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_serper( + api_key: str, query: str, count: int, filter_list: Optional[List[str]] = None +) -> list[SearchResult]: + """Search using serper.dev's API and return the results as a list of SearchResult objects.
+ + Args: + api_key (str): A serper.dev API key + query (str): The query to search for + """ + url = "https://google.serper.dev/search" + + payload = json.dumps({"q": query}) + headers = {"X-API-KEY": api_key, "Content-Type": "application/json"} + + response = requests.request("POST", url, headers=headers, data=payload) + response.raise_for_status() + + json_response = response.json() + results = sorted( + json_response.get("organic", []), key=lambda x: x.get("position", 0) + ) + if filter_list: + results = get_filtered_results(results, filter_list) + return [ + SearchResult( + link=result["link"], + title=result.get("title"), + snippet=result.get("description"), + ) + for result in results[:count] + ] diff --git a/backend/apps/rag/search/serply.py b/backend/apps/rag/search/serply.py new file mode 100644 index 0000000000000000000000000000000000000000..24b249b739425b9ad941c336f3daa76f46272297 --- /dev/null +++ b/backend/apps/rag/search/serply.py @@ -0,0 +1,70 @@ +import json +import logging +from typing import List, Optional +import requests +from urllib.parse import urlencode + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_serply( + api_key: str, + query: str, + count: int, + hl: str = "us", + limit: int = 10, + device_type: str = "desktop", + proxy_location: str = "US", + filter_list: Optional[List[str]] = None, +) -> list[SearchResult]: + """Search using serper.dev's API and return the results as a list of SearchResult objects. + + Args: + api_key (str): A serply.io API key + query (str): The query to search for + hl (str): Host Language code to display results in (reference https://developers.google.com/custom-search/docs/xml_results?hl=en#wsInterfaceLanguages) + limit (int): The maximum number of results to return [10-100, defaults to 10] + """ + log.info("Searching with Serply") + + url = "https://api.serply.io/v1/search/" + + query_payload = { + "q": query, + "language": "en", + "num": limit, + "gl": proxy_location.upper(), + "hl": hl.lower(), + } + + url = f"{url}{urlencode(query_payload)}" + headers = { + "X-API-KEY": api_key, + "X-User-Agent": device_type, + "User-Agent": "open-webui", + "X-Proxy-Location": proxy_location, + } + + response = requests.request("GET", url, headers=headers) + response.raise_for_status() + + json_response = response.json() + log.info(f"results from serply search: {json_response}") + + results = sorted( + json_response.get("results", []), key=lambda x: x.get("realPosition", 0) + ) + if filter_list: + results = get_filtered_results(results, filter_list) + return [ + SearchResult( + link=result["link"], + title=result.get("title"), + snippet=result.get("description"), + ) + for result in results[:count] + ] diff --git a/backend/apps/rag/search/serpstack.py b/backend/apps/rag/search/serpstack.py new file mode 100644 index 0000000000000000000000000000000000000000..64b0f117d906414714d56470271ca13a0a9d4bb9 --- /dev/null +++ b/backend/apps/rag/search/serpstack.py @@ -0,0 +1,49 @@ +import json +import logging +from typing import List, Optional +import requests + +from apps.rag.search.main import SearchResult, get_filtered_results +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_serpstack( + api_key: str, + query: str, + count: int, + filter_list: Optional[List[str]] = None, + https_enabled: bool = True, +) -> list[SearchResult]: + """Search 
using serpstack.com's and return the results as a list of SearchResult objects. + + Args: + api_key (str): A serpstack.com API key + query (str): The query to search for + https_enabled (bool): Whether to use HTTPS or HTTP for the API request + """ + url = f"{'https' if https_enabled else 'http'}://api.serpstack.com/search" + + headers = {"Content-Type": "application/json"} + params = { + "access_key": api_key, + "query": query, + } + + response = requests.request("POST", url, headers=headers, params=params) + response.raise_for_status() + + json_response = response.json() + results = sorted( + json_response.get("organic_results", []), key=lambda x: x.get("position", 0) + ) + if filter_list: + results = get_filtered_results(results, filter_list) + return [ + SearchResult( + link=result["url"], title=result.get("title"), snippet=result.get("snippet") + ) + for result in results[:count] + ] diff --git a/backend/apps/rag/search/tavily.py b/backend/apps/rag/search/tavily.py new file mode 100644 index 0000000000000000000000000000000000000000..b15d6ef9d5b55aa2eee6f77212e6561f1fbaf4fa --- /dev/null +++ b/backend/apps/rag/search/tavily.py @@ -0,0 +1,39 @@ +import logging + +import requests + +from apps.rag.search.main import SearchResult +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def search_tavily(api_key: str, query: str, count: int) -> list[SearchResult]: + """Search using Tavily's Search API and return the results as a list of SearchResult objects. + + Args: + api_key (str): A Tavily Search API key + query (str): The query to search for + + Returns: + List[SearchResult]: A list of search results + """ + url = "https://api.tavily.com/search" + data = {"query": query, "api_key": api_key} + + response = requests.post(url, json=data) + response.raise_for_status() + + json_response = response.json() + + raw_search_results = json_response.get("results", []) + + return [ + SearchResult( + link=result["url"], + title=result.get("title", ""), + snippet=result.get("content"), + ) + for result in raw_search_results[:count] + ] diff --git a/backend/apps/rag/search/testdata/brave.json b/backend/apps/rag/search/testdata/brave.json new file mode 100644 index 0000000000000000000000000000000000000000..38487390d9067236e8d5bd6afcedfd08ae9e8368 --- /dev/null +++ b/backend/apps/rag/search/testdata/brave.json @@ -0,0 +1,998 @@ +{ + "query": { + "original": "python", + "show_strict_warning": false, + "is_navigational": true, + "is_news_breaking": false, + "spellcheck_off": true, + "country": "us", + "bad_results": false, + "should_fallback": false, + "postal_code": "", + "city": "", + "header_country": "", + "more_results_available": true, + "state": "" + }, + "mixed": { + "type": "mixed", + "main": [ + { + "type": "web", + "index": 0, + "all": false + }, + { + "type": "web", + "index": 1, + "all": false + }, + { + "type": "news", + "all": true + }, + { + "type": "web", + "index": 2, + "all": false + }, + { + "type": "videos", + "all": true + }, + { + "type": "web", + "index": 3, + "all": false + }, + { + "type": "web", + "index": 4, + "all": false + }, + { + "type": "web", + "index": 5, + "all": false + }, + { + "type": "web", + "index": 6, + "all": false + }, + { + "type": "web", + "index": 7, + "all": false + }, + { + "type": "web", + "index": 8, + "all": false + }, + { + "type": "web", + "index": 9, + "all": false + }, + { + "type": "web", + "index": 10, + "all": false + }, + { + "type": "web", + "index": 11, + "all": false + }, + { + "type": 
"web", + "index": 12, + "all": false + }, + { + "type": "web", + "index": 13, + "all": false + }, + { + "type": "web", + "index": 14, + "all": false + }, + { + "type": "web", + "index": 15, + "all": false + }, + { + "type": "web", + "index": 16, + "all": false + }, + { + "type": "web", + "index": 17, + "all": false + }, + { + "type": "web", + "index": 18, + "all": false + }, + { + "type": "web", + "index": 19, + "all": false + } + ], + "top": [], + "side": [] + }, + "news": { + "type": "news", + "results": [ + { + "title": "Google lays off staff from Flutter, Dart and Python teams weeks before its developer conference | TechCrunch", + "url": "https://techcrunch.com/2024/05/01/google-lays-off-staff-from-flutter-dart-python-weeks-before-its-developer-conference/", + "is_source_local": false, + "is_source_both": false, + "description": "Google told TechCrunch that Flutter will have new updates to share at I/O this year.", + "page_age": "2024-05-02T17:40:05", + "family_friendly": true, + "meta_url": { + "scheme": "https", + "netloc": "techcrunch.com", + "hostname": "techcrunch.com", + "favicon": "https://imgs.search.brave.com/N6VSEVahheQOb7lqfb47dhUOB4XD-6sfQOP94sCe3Oo/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvZGI5Njk0Yzlk/YWM3ZWMwZjg1MTM1/NmIyMWEyNzBjZDZj/ZDQyNmFlNGU0NDRi/MDgyYjQwOGU0Y2Qy/ZWMwNWQ2ZC90ZWNo/Y3J1bmNoLmNvbS8", + "path": "› 2024 › 05 › 01 › google-lays-off-staff-from-flutter-dart-python-weeks-before-its-developer-conference" + }, + "breaking": false, + "thumbnail": { + "src": "https://imgs.search.brave.com/gCI5UG8muOEOZDAx9vpu6L6r6R00mD7jOF08-biFoyQ/rs:fit:200:200:1/g:ce/aHR0cHM6Ly90ZWNo/Y3J1bmNoLmNvbS93/cC1jb250ZW50L3Vw/bG9hZHMvMjAxOC8x/MS9HZXR0eUltYWdl/cy0xMDAyNDg0NzQ2/LmpwZz9yZXNpemU9/MTIwMCw4MDA" + }, + "age": "3 days ago", + "extra_snippets": [ + "Ahead of Google’s annual I/O developer conference in May, the tech giant has laid off staff across key teams like Flutter, Dart, Python and others, according to reports from affected employees shared on social media. Google confirmed the layoffs to TechCrunch, but not the specific teams, roles or how many people were let go.", + "In a separate post on Reddit, another commenter noted the Python team affected by the layoffs were those who managed the internal Python runtimes and toolchains and worked with OSS Python. Included in this group were “multiple current and former core devs and steering council members,” they said.", + "Meanwhile, others shared on Y Combinator’s Hacker News, where a Python team member detailed their specific duties on the technical front and noted that, for years, much of the work was done with fewer than 10 people. Another Hacker News commenter said their early years on the Python team were spent paying down internal technical debt accumulated from not having a strong Python strategy.", + "CNBC reports that a total of 200 people were let go across Google’s “Core” teams, which included those working on Python, app platforms, and other engineering roles. Some jobs were being shifted to India and Mexico, it said, citing internal documents." + ] + } + ], + "mutated_by_goggles": false + }, + "type": "search", + "videos": { + "type": "videos", + "results": [ + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=b093aqAZiPU", + "title": "👩‍💻 Python for Beginners Tutorial - YouTube", + "description": "In this step-by-step Python for beginner's tutorial, learn how you can get started programming in Python. 
In this video, I assume that you are completely new...", + "age": "March 25, 2021", + "page_age": "2021-03-25T10:00:08", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/tZI4Do4_EYcTCsD_MvE3Jx8FzjIXwIJ5ZuKhwiWTyZs/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS9i/MDkzYXFBWmlQVS9t/YXhyZXNkZWZhdWx0/LmpwZw" + } + }, + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=rfscVS0vtbw", + "title": "Learn Python - Full Course for Beginners [Tutorial] - YouTube", + "description": "This course will give you a full introduction into all of the core concepts in python. Follow along with the videos and you'll be a python programmer in no t...", + "age": "July 11, 2018", + "page_age": "2018-07-11T18:00:42", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/65zkx_kPU_zJb-4nmvvY-q5-ZZwzceChz-N00V8cqvk/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS9y/ZnNjVlMwdnRidy9t/YXhyZXNkZWZhdWx0/LmpwZw" + } + }, + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=_uQrJ0TkZlc", + "title": "Python Tutorial - Python Full Course for Beginners - YouTube", + "description": "Become a Python pro! 
🚀 This comprehensive tutorial takes you from beginner to hero, covering the basics, machine learning, and web development projects.🚀 W...", + "age": "February 18, 2019", + "page_age": "2019-02-18T15:00:08", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/Djiv1pXLq1ClqBSE_86jQnEYR8bW8UJP6Cs7LrgyQzQ/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS9f/dVFySjBUa1psYy9t/YXhyZXNkZWZhdWx0/LmpwZw" + } + }, + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=wRKgzC-MhIc", + "title": "[] and {} vs list() and dict(), which is better?", + "description": "Enjoy the videos and music you love, upload original content, and share it all with friends, family, and the world on YouTube.", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/Hw9ep2Pio13X1VZjRw_h9R2VH_XvZFOuGlQJVnVkeq0/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS93/UktnekMtTWhJYy9o/cWRlZmF1bHQuanBn" + } + }, + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=LWdsF79H1Pg", + "title": "print() vs. return in Python Functions - YouTube", + "description": "In this video, you will learn the differences between the return statement and the print function when they are used inside Python functions. We will see an ...", + "age": "June 11, 2022", + "page_age": "2022-06-11T21:33:26", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/ebglnr5_jwHHpvon3WU-5hzt0eHdTZSVGg3Ts6R38xY/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS9M/V2RzRjc5SDFQZy9t/YXhyZXNkZWZhdWx0/LmpwZw" + } + }, + { + "type": "video_result", + "url": "https://www.youtube.com/watch?v=AovxLr8jUH4", + "title": "Python Tutorial for Beginners 5 - Python print() and input() Function ...", + "description": "In this Video I am going to show How to use print() Function and input() Function in Python. 
In python The print() function is used to print the specified ...", + "age": "August 28, 2018", + "page_age": "2018-08-28T20:11:09", + "video": {}, + "meta_url": { + "scheme": "https", + "netloc": "youtube.com", + "hostname": "www.youtube.com", + "favicon": "https://imgs.search.brave.com/Ux4Hee4evZhvjuTKwtapBycOGjGDci2Gvn2pbSzvbC0/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTkyZTZiMWU3/YzU3Nzc5YjExYzUy/N2VhZTIxOWNlYjM5/ZGVjN2MyZDY4Nzdh/ZDYzMTYxNmI5N2Rk/Y2Q3N2FkNy93d3cu/eW91dHViZS5jb20v", + "path": "› watch" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/nCoLEcWkKtiecprWbS6nufwGCaSbPH7o0-sMeIkFmjI/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9pLnl0/aW1nLmNvbS92aS9B/b3Z4THI4alVINC9o/cWRlZmF1bHQuanBn" + } + } + ], + "mutated_by_goggles": false + }, + "web": { + "type": "search", + "results": [ + { + "title": "Welcome to Python.org", + "url": "https://www.python.org", + "is_source_local": false, + "is_source_both": false, + "description": "The official home of the Python Programming Language", + "page_age": "2023-09-09T15:55:05", + "profile": { + "name": "Python", + "url": "https://www.python.org", + "long_name": "python.org", + "img": "https://imgs.search.brave.com/vBaRH-v6oPS4csO4cdvuKhZ7-xDVvydin3oe3zXYxAI/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNTJjMzZjNDBj/MmIzODgwMGUyOTRj/Y2E5MjM3YjRkYTZj/YWI1Yzk1NTlmYTgw/ZDBjNzM0MGMxZjQz/YWFjNTczYy93d3cu/cHl0aG9uLm9yZy8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "python.org", + "hostname": "www.python.org", + "favicon": "https://imgs.search.brave.com/vBaRH-v6oPS4csO4cdvuKhZ7-xDVvydin3oe3zXYxAI/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNTJjMzZjNDBj/MmIzODgwMGUyOTRj/Y2E5MjM3YjRkYTZj/YWI1Yzk1NTlmYTgw/ZDBjNzM0MGMxZjQz/YWFjNTczYy93d3cu/cHl0aG9uLm9yZy8", + "path": "" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/GGfNfe5rxJ8QWEoxXniSLc0-POLU3qPyTIpuqPdbmXk/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/cHl0aG9uLm9yZy9z/dGF0aWMvb3Blbmdy/YXBoLWljb24tMjAw/eDIwMC5wbmc", + "original": "https://www.python.org/static/opengraph-icon-200x200.png", + "logo": false + }, + "age": "September 9, 2023", + "cluster_type": "generic", + "cluster": [ + { + "title": "Downloads", + "url": "https://www.python.org/downloads/", + "is_source_local": false, + "is_source_both": false, + "description": "The official home of the Python Programming Language", + "family_friendly": true + }, + { + "title": "Macos", + "url": "https://www.python.org/downloads/macos/", + "is_source_local": false, + "is_source_both": false, + "description": "The official home of the Python Programming Language", + "family_friendly": true + }, + { + "title": "Windows", + "url": "https://www.python.org/downloads/windows/", + "is_source_local": false, + "is_source_both": false, + "description": "The official home of the Python Programming Language", + "family_friendly": true + }, + { + "title": "Getting Started", + "url": "https://www.python.org/about/gettingstarted/", + "is_source_local": false, + "is_source_both": false, + "description": "The official home of the Python Programming Language", + "family_friendly": true + } + ], + "extra_snippets": [ + "Calculations are simple with Python, and expression syntax is straightforward: the operators +, -, * and / work as expected; parentheses () can be used for grouping. 
More about simple math functions in Python 3.", + "The core of extensible programming is defining functions. Python allows mandatory and optional arguments, keyword arguments, and even arbitrary argument lists. More about defining functions in Python 3", + "Lists (known as arrays in other languages) are one of the compound data types that Python understands. Lists can be indexed, sliced and manipulated with other built-in functions. More about lists in Python 3", + "# Python 3: Simple output (with Unicode) >>> print(\"Hello, I'm Python!\") Hello, I'm Python! # Input, assignment >>> name = input('What is your name?\\n') >>> print('Hi, %s.' % name) What is your name? Python Hi, Python." + ] + }, + { + "title": "Python (programming language) - Wikipedia", + "url": "https://en.wikipedia.org/wiki/Python_(programming_language)", + "is_source_local": false, + "is_source_both": false, + "description": "Python is a high-level, general-purpose programming language. Its design philosophy emphasizes code readability with the use of significant indentation. Python is dynamically typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly procedural), ...", + "page_age": "2024-05-01T12:54:03", + "profile": { + "name": "Wikipedia", + "url": "https://en.wikipedia.org/wiki/Python_(programming_language)", + "long_name": "en.wikipedia.org", + "img": "https://imgs.search.brave.com/0kxnVOiqv-faZvOJc7zpym4Zin1CTs1f1svfNZSzmfU/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNjQwNGZhZWY0/ZTQ1YWUzYzQ3MDUw/MmMzMGY3NTQ0ZjNj/NDUwMDk5ZTI3MWRk/NWYyNTM4N2UwOTE0/NTI3ZDQzNy9lbi53/aWtpcGVkaWEub3Jn/Lw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "en.wikipedia.org", + "hostname": "en.wikipedia.org", + "favicon": "https://imgs.search.brave.com/0kxnVOiqv-faZvOJc7zpym4Zin1CTs1f1svfNZSzmfU/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNjQwNGZhZWY0/ZTQ1YWUzYzQ3MDUw/MmMzMGY3NTQ0ZjNj/NDUwMDk5ZTI3MWRk/NWYyNTM4N2UwOTE0/NTI3ZDQzNy9lbi53/aWtpcGVkaWEub3Jn/Lw", + "path": "› wiki › Python_(programming_language)" + }, + "age": "4 days ago", + "extra_snippets": [ + "Python is dynamically typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly procedural), object-oriented and functional programming. It is often described as a \"batteries included\" language due to its comprehensive standard library.", + "Guido van Rossum began working on Python in the late 1980s as a successor to the ABC programming language and first released it in 1991 as Python 0.9.0. Python 2.0 was released in 2000. Python 3.0, released in 2008, was a major revision not completely backward-compatible with earlier versions. Python 2.7.18, released in 2020, was the last release of Python 2.", + "Python was invented in the late 1980s by Guido van Rossum at Centrum Wiskunde & Informatica (CWI) in the Netherlands as a successor to the ABC programming language, which was inspired by SETL, capable of exception handling and interfacing with the Amoeba operating system.", + "Python consistently ranks as one of the most popular programming languages, and has gained widespread use in the machine learning community." 
+ ] + }, + { + "title": "Python Tutorial", + "url": "https://www.w3schools.com/python/", + "is_source_local": false, + "is_source_both": false, + "description": "W3Schools offers free online tutorials, references and exercises in all the major languages of the web. Covering popular subjects like HTML, CSS, JavaScript, Python, SQL, Java, and many, many more.", + "page_age": "2017-12-07T00:00:00", + "profile": { + "name": "W3Schools", + "url": "https://www.w3schools.com/python/", + "long_name": "w3schools.com", + "img": "https://imgs.search.brave.com/JwO5r7z3HTBkU29vgNH_4rrSWLf2M4-8FMWNvbxrKX8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjVlMGVkZDVj/ZGMyZWRmMzAwODRi/ZDAwZGE4NWI3NmU4/MjRhNjEzOGFhZWY3/ZGViMjY1OWY2ZDYw/YTZiOGUyZS93d3cu/dzNzY2hvb2xzLmNv/bS8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "w3schools.com", + "hostname": "www.w3schools.com", + "favicon": "https://imgs.search.brave.com/JwO5r7z3HTBkU29vgNH_4rrSWLf2M4-8FMWNvbxrKX8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjVlMGVkZDVj/ZGMyZWRmMzAwODRi/ZDAwZGE4NWI3NmU4/MjRhNjEzOGFhZWY3/ZGViMjY1OWY2ZDYw/YTZiOGUyZS93d3cu/dzNzY2hvb2xzLmNv/bS8", + "path": "› python" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/EMfp8dodbJehmj0yCJh8317RHuaumsddnHI4bujvFcg/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/dzNzY2hvb2xzLmNv/bS9pbWFnZXMvdzNz/Y2hvb2xzX2xvZ29f/NDM2XzIucG5n", + "original": "https://www.w3schools.com/images/w3schools_logo_436_2.png", + "logo": true + }, + "age": "December 7, 2017", + "extra_snippets": [ + "Well organized and easy to understand Web building tutorials with lots of examples of how to use HTML, CSS, JavaScript, SQL, Python, PHP, Bootstrap, Java, XML and more.", + "HTML CSS JAVASCRIPT SQL PYTHON JAVA PHP HOW TO W3.CSS C C++ C# BOOTSTRAP REACT MYSQL JQUERY EXCEL XML DJANGO NUMPY PANDAS NODEJS R TYPESCRIPT ANGULAR GIT POSTGRESQL MONGODB ASP AI GO KOTLIN SASS VUE DSA GEN AI SCIPY AWS CYBERSECURITY DATA SCIENCE", + "Python Variables Variable Names Assign Multiple Values Output Variables Global Variables Variable Exercises Python Data Types Python Numbers Python Casting Python Strings", + "Python Strings Slicing Strings Modify Strings Concatenate Strings Format Strings Escape Characters String Methods String Exercises Python Booleans Python Operators Python Lists" + ] + }, + { + "title": "Online Python - IDE, Editor, Compiler, Interpreter", + "url": "https://www.online-python.com/", + "is_source_local": false, + "is_source_both": false, + "description": "Build and Run your Python code instantly. 
Online-Python is a quick and easy tool that helps you to build, compile, test your python programs.", + "profile": { + "name": "Online-python", + "url": "https://www.online-python.com/", + "long_name": "online-python.com", + "img": "https://imgs.search.brave.com/kfaEvapwHxSsRObO52-I-otYFPHpG1h7UXJyUqDM2Ec/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvZGYxODdjNWQ0/NjZjZTNiMjk5NDY1/MWI5MTgyYjU3Y2Q3/MTI3NGM5MjUzY2Fi/OGQ3MTQ4MmIxMTQx/ZTcxNWFhMC93d3cu/b25saW5lLXB5dGhv/bi5jb20v" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "online-python.com", + "hostname": "www.online-python.com", + "favicon": "https://imgs.search.brave.com/kfaEvapwHxSsRObO52-I-otYFPHpG1h7UXJyUqDM2Ec/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvZGYxODdjNWQ0/NjZjZTNiMjk5NDY1/MWI5MTgyYjU3Y2Q3/MTI3NGM5MjUzY2Fi/OGQ3MTQ4MmIxMTQx/ZTcxNWFhMC93d3cu/b25saW5lLXB5dGhv/bi5jb20v", + "path": "" + }, + "extra_snippets": [ + "Build, run, and share Python code online for free with the help of online-integrated python's development environment (IDE). It is one of the most efficient, dependable, and potent online compilers for the Python programming language. It is not necessary for you to bother about establishing a Python environment in your local.", + "It is one of the most efficient, dependable, and potent online compilers for the Python programming language. It is not necessary for you to bother about establishing a Python environment in your local. Now You can immediately execute the Python code in the web browser of your choice.", + "It is not necessary for you to bother about establishing a Python environment in your local. Now You can immediately execute the Python code in the web browser of your choice. Using this Python editor is simple and quick to get up and running with. Simply type in the programme, and then press the RUN button!", + "Now You can immediately execute the Python code in the web browser of your choice. Using this Python editor is simple and quick to get up and running with. Simply type in the programme, and then press the RUN button! The code can be saved online by choosing the SHARE option, which also gives you the ability to access your code from any location providing you have internet access." 
+ ] + }, + { + "title": "Python · GitHub", + "url": "https://github.com/python", + "is_source_local": false, + "is_source_both": false, + "description": "Repositories related to the Python Programming language - Python", + "page_age": "2023-03-06T00:00:00", + "profile": { + "name": "GitHub", + "url": "https://github.com/python", + "long_name": "github.com", + "img": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "github.com", + "hostname": "github.com", + "favicon": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw", + "path": "› python" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/POoaRfu_7gfp-D_O3qMNJrwDqJNbiDu1HuBpNJ_MpVQ/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9hdmF0/YXJzLmdpdGh1YnVz/ZXJjb250ZW50LmNv/bS91LzE1MjU5ODE_/cz0yMDAmYW1wO3Y9/NA", + "original": "https://avatars.githubusercontent.com/u/1525981?s=200&v=4", + "logo": false + }, + "age": "March 6, 2023", + "extra_snippets": ["Configuration for Python planets (e.g. http://planetpython.org)"] + }, + { + "title": "Online Python Compiler (Interpreter)", + "url": "https://www.programiz.com/python-programming/online-compiler/", + "is_source_local": false, + "is_source_both": false, + "description": "Write and run Python code using our online compiler (interpreter). You can use Python Shell like IDLE, and take inputs from the user in our Python compiler.", + "page_age": "2020-06-02T00:00:00", + "profile": { + "name": "Programiz", + "url": "https://www.programiz.com/python-programming/online-compiler/", + "long_name": "programiz.com", + "img": "https://imgs.search.brave.com/ozj4JFayZ3Fs5c9eTp7M5g12azQ_Hblgu4dpTuHRz6U/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMGJlN2U1YjVi/Y2M3ZDU5OGMwMWNi/M2Q3YjhjOTM1ZTFk/Y2NkZjE4NGQwOGIx/MTQ4NjI2YmNhODVj/MzFkMmJhYy93d3cu/cHJvZ3JhbWl6LmNv/bS8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "programiz.com", + "hostname": "www.programiz.com", + "favicon": "https://imgs.search.brave.com/ozj4JFayZ3Fs5c9eTp7M5g12azQ_Hblgu4dpTuHRz6U/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMGJlN2U1YjVi/Y2M3ZDU5OGMwMWNi/M2Q3YjhjOTM1ZTFk/Y2NkZjE4NGQwOGIx/MTQ4NjI2YmNhODVj/MzFkMmJhYy93d3cu/cHJvZ3JhbWl6LmNv/bS8", + "path": "› python-programming › online-compiler" + }, + "age": "June 2, 2020", + "extra_snippets": [ + "Python Online Compiler Online R Compiler SQL Online Editor Online HTML/CSS Editor Online Java Compiler C Online Compiler C++ Online Compiler C# Online Compiler JavaScript Online Compiler Online GoLang Compiler Online PHP Compiler Online Swift Compiler Online Rust Compiler", + "# Online Python compiler (interpreter) to run Python online. # Write Python 3 code in this online editor and run it. 
print(\"Try programiz.pro\")" + ] + }, + { + "title": "Python Developer", + "url": "https://twitter.com/Python_Dv/status/1786763460992544791", + "is_source_local": false, + "is_source_both": false, + "description": "Python Developer", + "page_age": "2024-05-04T14:30:03", + "profile": { + "name": "X", + "url": "https://twitter.com/Python_Dv/status/1786763460992544791", + "long_name": "twitter.com", + "img": "https://imgs.search.brave.com/Zq483bGX0GnSgym-1P7iyOyEDX3PkDZSNT8m56F862A/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvN2MxOTUxNzhj/OTY1ZTQ3N2I0MjJk/MTY5NGM0MTRlYWVi/MjU1YWE2NDUwYmQ2/YTA2MDFhMDlkZDEx/NTAzZGNiNi90d2l0/dGVyLmNvbS8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "twitter.com", + "hostname": "twitter.com", + "favicon": "https://imgs.search.brave.com/Zq483bGX0GnSgym-1P7iyOyEDX3PkDZSNT8m56F862A/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvN2MxOTUxNzhj/OTY1ZTQ3N2I0MjJk/MTY5NGM0MTRlYWVi/MjU1YWE2NDUwYmQ2/YTA2MDFhMDlkZDEx/NTAzZGNiNi90d2l0/dGVyLmNvbS8", + "path": "› Python_Dv › status › 1786763460992544791" + }, + "age": "20 hours ago" + }, + { + "title": "input table name? - python script - KNIME Extensions - KNIME Community Forum", + "url": "https://forum.knime.com/t/input-table-name-python-script/78978", + "is_source_local": false, + "is_source_both": false, + "description": "Hi, when running a python script node, I get the error seen on the screenshot Same happens with this code too: The script input is output from the csv reader node. How can I get the right name for that table? Best wishes, Dario", + "page_age": "2024-05-04T09:20:44", + "profile": { + "name": "Knime", + "url": "https://forum.knime.com/t/input-table-name-python-script/78978", + "long_name": "forum.knime.com", + "img": "https://imgs.search.brave.com/WQoOhAD5i6uEhJ-qXvlWMJwbGA52f2Ycc_ns36EK698/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTAxNzMxNjFl/MzJjNzU5NzRkOTMz/Mjg4NDU2OWUxM2Rj/YzVkOGM3MzIwNzI2/YTY1NzYxNzA1MDE5/NzQzOWU3NC9mb3J1/bS5rbmltZS5jb20v" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "article", + "meta_url": { + "scheme": "https", + "netloc": "forum.knime.com", + "hostname": "forum.knime.com", + "favicon": "https://imgs.search.brave.com/WQoOhAD5i6uEhJ-qXvlWMJwbGA52f2Ycc_ns36EK698/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTAxNzMxNjFl/MzJjNzU5NzRkOTMz/Mjg4NDU2OWUxM2Rj/YzVkOGM3MzIwNzI2/YTY1NzYxNzA1MDE5/NzQzOWU3NC9mb3J1/bS5rbmltZS5jb20v", + "path": " › knime extensions" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/DtEl38dcvuM1kGfhN0T5HfOrsMJcztWNyriLvtDJmKI/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9mb3J1/bS1jZG4ua25pbWUu/Y29tL3VwbG9hZHMv/ZGVmYXVsdC9vcmln/aW5hbC8zWC9lLzYv/ZTY0M2M2NzFlNzAz/MDg2MjkwMWY2YzJh/OWFjOWI5ZmEwM2M3/ZjMwZi5wbmc", + "original": "https://forum-cdn.knime.com/uploads/default/original/3X/e/6/e643c671e7030862901f6c2a9ac9b9fa03c7f30f.png", + "logo": false + }, + "age": "1 day ago", + "extra_snippets": [ + "Hi, when running a python script node, I get the error seen on the screenshot Same happens with this code too: The script input is output from the csv reader node. How can I get the right name for that table? …" + ] + }, + { + "title": "What does the Double Star operator mean in Python? 
- GeeksforGeeks", + "url": "https://www.geeksforgeeks.org/what-does-the-double-star-operator-mean-in-python/", + "is_source_local": false, + "is_source_both": false, + "description": "A Computer Science portal for geeks. It contains well written, well thought and well explained computer science and programming articles, quizzes and practice/competitive programming/company interview Questions.", + "page_age": "2023-03-14T17:15:04", + "profile": { + "name": "GeeksforGeeks", + "url": "https://www.geeksforgeeks.org/what-does-the-double-star-operator-mean-in-python/", + "long_name": "geeksforgeeks.org", + "img": "https://imgs.search.brave.com/fhzcfv5xltx6-YBvJI9RZgS7xZo0dPNaASsrB8YOsCs/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjBhOGQ3MmNi/ZWE5N2EwMmZjYzA1/ZTI0ZTFhMGUyMTE0/MGM0ZTBmMWZlM2Y2/Yzk2ODMxZTRhYTBi/NDdjYTE0OS93d3cu/Z2Vla3Nmb3JnZWVr/cy5vcmcv" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "article", + "meta_url": { + "scheme": "https", + "netloc": "geeksforgeeks.org", + "hostname": "www.geeksforgeeks.org", + "favicon": "https://imgs.search.brave.com/fhzcfv5xltx6-YBvJI9RZgS7xZo0dPNaASsrB8YOsCs/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjBhOGQ3MmNi/ZWE5N2EwMmZjYzA1/ZTI0ZTFhMGUyMTE0/MGM0ZTBmMWZlM2Y2/Yzk2ODMxZTRhYTBi/NDdjYTE0OS93d3cu/Z2Vla3Nmb3JnZWVr/cy5vcmcv", + "path": "› what-does-the-double-star-operator-mean-in-python" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/GcR-j_dLbyHkbHEI3ffLMi6xpXGhF_2Z8POIoqtokhM/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9tZWRp/YS5nZWVrc2Zvcmdl/ZWtzLm9yZy93cC1j/b250ZW50L3VwbG9h/ZHMvZ2ZnXzIwMFgy/MDAtMTAweDEwMC5w/bmc", + "original": "https://media.geeksforgeeks.org/wp-content/uploads/gfg_200X200-100x100.png", + "logo": false + }, + "age": "March 14, 2023", + "extra_snippets": [ + "Difference between / vs. // operator in Python", + "Double Star or (**) is one of the Arithmetic Operator (Like +, -, *, **, /, //, %) in Python Language. It is also known as Power Operator.", + "The time complexity of the given Python program is O(n), where n is the number of key-value pairs in the input dictionary.", + "Inplace Operators in Python | Set 2 (ixor(), iand(), ipow(),…)" + ] + }, + { + "title": "r/Python", + "url": "https://www.reddit.com/r/Python/", + "is_source_local": false, + "is_source_both": false, + "description": "The official Python community for Reddit! Stay up to date with the latest news, packages, and meta information relating to the Python programming language. 
--- If you have questions or are new to Python use r/LearnPython", + "page_age": "2022-12-30T16:25:02", + "profile": { + "name": "Reddit", + "url": "https://www.reddit.com/r/Python/", + "long_name": "reddit.com", + "img": "https://imgs.search.brave.com/mAZYEK9Wi13WLDUge7XZ8YuDTwm6DP6gBjvz1GdYZVY/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvN2ZiNTU0M2Nj/MTFhZjRiYWViZDlk/MjJiMjBjMzFjMDRk/Y2IzYWI0MGI0MjVk/OGY5NzQzOGQ5NzQ5/NWJhMWI0NC93d3cu/cmVkZGl0LmNvbS8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "reddit.com", + "hostname": "www.reddit.com", + "favicon": "https://imgs.search.brave.com/mAZYEK9Wi13WLDUge7XZ8YuDTwm6DP6gBjvz1GdYZVY/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvN2ZiNTU0M2Nj/MTFhZjRiYWViZDlk/MjJiMjBjMzFjMDRk/Y2IzYWI0MGI0MjVk/OGY5NzQzOGQ5NzQ5/NWJhMWI0NC93d3cu/cmVkZGl0LmNvbS8", + "path": "› r › Python" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/zWd10t3zg34ciHiAB-K5WWK3h_H4LedeDot9BVX7Ydo/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9zdHls/ZXMucmVkZGl0bWVk/aWEuY29tL3Q1XzJx/aDB5L3N0eWxlcy9j/b21tdW5pdHlJY29u/X2NpZmVobDR4dDdu/YzEucG5n", + "original": "https://styles.redditmedia.com/t5_2qh0y/styles/communityIcon_cifehl4xt7nc1.png", + "logo": false + }, + "age": "December 30, 2022", + "extra_snippets": [ + "r/Python: The official Python community for Reddit! Stay up to date with the latest news, packages, and meta information relating to the Python…", + "By default, Python allows you to import and use anything, anywhere. Over time, this results in modules that were intended to be separate getting tightly coupled together, and domain boundaries breaking down. We experienced this first-hand at a unicorn startup, where the eng team paused development for over a year in an attempt to split up packages into independent services.", + "Hello r/Python! It's time to share what you've been working on! Whether it's a work-in-progress, a completed masterpiece, or just a rough idea, let us know what you're up to!", + "Whether it's your job, your hobby, or your passion project, all Python-related work is welcome here." + ] + }, + { + "title": "GitHub - python/cpython: The Python programming language", + "url": "https://github.com/python/cpython", + "is_source_local": false, + "is_source_both": false, + "description": "The Python programming language. 
Contribute to python/cpython development by creating an account on GitHub.", + "page_age": "2022-10-29T00:00:00", + "profile": { + "name": "GitHub", + "url": "https://github.com/python/cpython", + "long_name": "github.com", + "img": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "software", + "meta_url": { + "scheme": "https", + "netloc": "github.com", + "hostname": "github.com", + "favicon": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw", + "path": "› python › cpython" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/BJbWFRUqgP-tKIyGK9ByXjuYjHO2mtYigUOEFNz_gXk/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9vcGVu/Z3JhcGguZ2l0aHVi/YXNzZXRzLmNvbS82/MTY5YmJkNTQ0YzAy/NDg0MGU4NDdjYTU1/YTU3ZGZmMDA2ZDAw/YWQ1NDIzOTFmYTQ3/YmJjODg3OWM0NWYw/MTZhL3B5dGhvbi9j/cHl0aG9u", + "original": "https://opengraph.githubassets.com/6169bbd544c024840e847ca55a57dff006d00ad542391fa47bbc8879c45f016a/python/cpython", + "logo": false + }, + "age": "October 29, 2022", + "extra_snippets": [ + "You can pass many options to the configure script; run ./configure --help to find out more. On macOS case-insensitive file systems and on Cygwin, the executable is called python.exe; elsewhere it's just python.", + "Building a complete Python installation requires the use of various additional third-party libraries, depending on your build platform and configure options. Not all standard library modules are buildable or useable on all platforms. Refer to the Install dependencies section of the Developer Guide for current detailed information on dependencies for various Linux distributions and macOS.", + "To get an optimized build of Python, configure --enable-optimizations before you run make. This sets the default make targets up to enable Profile Guided Optimization (PGO) and may be used to auto-enable Link Time Optimization (LTO) on some platforms. For more details, see the sections below.", + "Copyright © 2001-2024 Python Software Foundation. All rights reserved." + ] + }, + { + "title": "5. Data Structures — Python 3.12.3 documentation", + "url": "https://docs.python.org/3/tutorial/datastructures.html", + "is_source_local": false, + "is_source_both": false, + "description": "This chapter describes some things you’ve learned about already in more detail, and adds some new things as well. More on Lists: The list data type has some more methods. 
Here are all of the method...", + "page_age": "2023-07-04T00:00:00", + "profile": { + "name": "Python documentation", + "url": "https://docs.python.org/3/tutorial/datastructures.html", + "long_name": "docs.python.org", + "img": "https://imgs.search.brave.com/F5Ym7eSElhGdGUFKLRxDj9Z_tc180ldpeMvQ2Q6ARbA/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMTUzOTFjOGVi/YTcyOTVmODA3ODIy/YjE2NzFjY2ViMjhl/NzRlY2JhYTc5YjNm/ZjhmODAyZWI2OGUw/ZjU4NDVlNy9kb2Nz/LnB5dGhvbi5vcmcv" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "docs.python.org", + "hostname": "docs.python.org", + "favicon": "https://imgs.search.brave.com/F5Ym7eSElhGdGUFKLRxDj9Z_tc180ldpeMvQ2Q6ARbA/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMTUzOTFjOGVi/YTcyOTVmODA3ODIy/YjE2NzFjY2ViMjhl/NzRlY2JhYTc5YjNm/ZjhmODAyZWI2OGUw/ZjU4NDVlNy9kb2Nz/LnB5dGhvbi5vcmcv", + "path": "› 3 › tutorial › datastructures.html" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/Y7GrMRF8WorDIMLuOl97XC8ltYpoOCqNwWF2pQIIKls/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9kb2Nz/LnB5dGhvbi5vcmcv/My9fc3RhdGljL29n/LWltYWdlLnBuZw", + "original": "https://docs.python.org/3/_static/og-image.png", + "logo": false + }, + "age": "July 4, 2023", + "extra_snippets": [ + "You might have noticed that methods like insert, remove or sort that only modify the list have no return value printed – they return the default None. [1] This is a design principle for all mutable data structures in Python.", + "We saw that lists and strings have many common properties, such as indexing and slicing operations. They are two examples of sequence data types (see Sequence Types — list, tuple, range). Since Python is an evolving language, other sequence data types may be added. There is also another standard sequence data type: the tuple.", + "Python also includes a data type for sets. A set is an unordered collection with no duplicate elements. Basic uses include membership testing and eliminating duplicate entries. Set objects also support mathematical operations like union, intersection, difference, and symmetric difference.", + "Another useful data type built into Python is the dictionary (see Mapping Types — dict). Dictionaries are sometimes found in other languages as “associative memories” or “associative arrays”. Unlike sequences, which are indexed by a range of numbers, dictionaries are indexed by keys, which can be any immutable type; strings and numbers can always be keys." + ] + }, + { + "title": "Something wrong with python packages / AUR Issues, Discussion & PKGBUILD Requests / Arch Linux Forums", + "url": "https://bbs.archlinux.org/viewtopic.php?id=295466", + "is_source_local": false, + "is_source_both": false, + "description": "Big Python updates require Python packages to be rebuild. 
For some reason they didn't think a bump that made it necessary to rebuild half the official repo was a news post.", + "page_age": "2024-05-04T08:30:02", + "profile": { + "name": "Archlinux", + "url": "https://bbs.archlinux.org/viewtopic.php?id=295466", + "long_name": "bbs.archlinux.org", + "img": "https://imgs.search.brave.com/3au9oqkzSri_aLEec3jo-0bFgLuICkydrWfjFcC8lkI/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNWNkODM1MWJl/ZmJhMzkzNzYzMDkz/NmEyMWMxNjI5MjNk/NGJmZjFhNTBlZDNl/Mzk5MzJjOGZkYjZl/MjNmY2IzNS9iYnMu/YXJjaGxpbnV4Lm9y/Zy8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "bbs.archlinux.org", + "hostname": "bbs.archlinux.org", + "favicon": "https://imgs.search.brave.com/3au9oqkzSri_aLEec3jo-0bFgLuICkydrWfjFcC8lkI/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvNWNkODM1MWJl/ZmJhMzkzNzYzMDkz/NmEyMWMxNjI5MjNk/NGJmZjFhNTBlZDNl/Mzk5MzJjOGZkYjZl/MjNmY2IzNS9iYnMu/YXJjaGxpbnV4Lm9y/Zy8", + "path": "› viewtopic.php" + }, + "age": "1 day ago", + "extra_snippets": [ + "Traceback (most recent call last): File \"/usr/lib/python3.12/importlib/metadata/__init__.py\", line 397, in from_name return next(cls.discover(name=name)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ StopIteration During handling of the above exception, another exception occurred: Traceback (most recent call last): File \"/usr/bin/informant\", line 33, in sys.exit(load_entry_point('informant==0.5.0', 'console_scripts', 'informant')()) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File \"/usr/bin/informant\", line 22, in importlib_load_entry_point for entry_point in distribution(dis" + ] + }, + { + "title": "Introduction to Python", + "url": "https://www.w3schools.com/python/python_intro.asp", + "is_source_local": false, + "is_source_both": false, + "description": "W3Schools offers free online tutorials, references and exercises in all the major languages of the web. 
Covering popular subjects like HTML, CSS, JavaScript, Python, SQL, Java, and many, many more.", + "profile": { + "name": "W3Schools", + "url": "https://www.w3schools.com/python/python_intro.asp", + "long_name": "w3schools.com", + "img": "https://imgs.search.brave.com/JwO5r7z3HTBkU29vgNH_4rrSWLf2M4-8FMWNvbxrKX8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjVlMGVkZDVj/ZGMyZWRmMzAwODRi/ZDAwZGE4NWI3NmU4/MjRhNjEzOGFhZWY3/ZGViMjY1OWY2ZDYw/YTZiOGUyZS93d3cu/dzNzY2hvb2xzLmNv/bS8" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "w3schools.com", + "hostname": "www.w3schools.com", + "favicon": "https://imgs.search.brave.com/JwO5r7z3HTBkU29vgNH_4rrSWLf2M4-8FMWNvbxrKX8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjVlMGVkZDVj/ZGMyZWRmMzAwODRi/ZDAwZGE4NWI3NmU4/MjRhNjEzOGFhZWY3/ZGViMjY1OWY2ZDYw/YTZiOGUyZS93d3cu/dzNzY2hvb2xzLmNv/bS8", + "path": "› python › python_intro.asp" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/EMfp8dodbJehmj0yCJh8317RHuaumsddnHI4bujvFcg/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/dzNzY2hvb2xzLmNv/bS9pbWFnZXMvdzNz/Y2hvb2xzX2xvZ29f/NDM2XzIucG5n", + "original": "https://www.w3schools.com/images/w3schools_logo_436_2.png", + "logo": true + }, + "extra_snippets": [ + "Well organized and easy to understand Web building tutorials with lots of examples of how to use HTML, CSS, JavaScript, SQL, Python, PHP, Bootstrap, Java, XML and more.", + "HTML CSS JAVASCRIPT SQL PYTHON JAVA PHP HOW TO W3.CSS C C++ C# BOOTSTRAP REACT MYSQL JQUERY EXCEL XML DJANGO NUMPY PANDAS NODEJS R TYPESCRIPT ANGULAR GIT POSTGRESQL MONGODB ASP AI GO KOTLIN SASS VUE DSA GEN AI SCIPY AWS CYBERSECURITY DATA SCIENCE", + "Python Variables Variable Names Assign Multiple Values Output Variables Global Variables Variable Exercises Python Data Types Python Numbers Python Casting Python Strings", + "Python Strings Slicing Strings Modify Strings Concatenate Strings Format Strings Escape Characters String Methods String Exercises Python Booleans Python Operators Python Lists" + ] + }, + { + "title": "bug: AUR package wants to use python but does not find any preset version · Issue #1740 · asdf-vm/asdf", + "url": "https://github.com/asdf-vm/asdf/issues/1740", + "is_source_local": false, + "is_source_both": false, + "description": "Describe the Bug I am not sure why this is happening, I am trying to install tlpui from AUR and it fails, here are some logs to help: ==> Making package: tlpui 2:1.6.5-1 (Mi 10 apr 2024 23:19:15 +0...", + "page_age": "2024-05-04T06:45:04", + "profile": { + "name": "GitHub", + "url": "https://github.com/asdf-vm/asdf/issues/1740", + "long_name": "github.com", + "img": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "software", + "meta_url": { + "scheme": "https", + "netloc": "github.com", + "hostname": "github.com", + "favicon": "https://imgs.search.brave.com/v8685zI4XInM0zxlNI2s7oE_2Sb-EL7lAy81WXbkQD8/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYWQyNWM1NjA5/ZjZmZjNlYzI2MDNk/N2VkNmJhYjE2MzZl/MDY5ZTMxMDUzZmY1/NmU3NWIzNWVmMjk0/NTBjMjJjZi9naXRo/dWIuY29tLw", + "path": "› asdf-vm › asdf › issues › 1740" + }, + 
"thumbnail": { + "src": "https://imgs.search.brave.com/KrLW5s_2n4jyP8XLbc3ZPVBaLD963tQgWzG9EWPZlQs/rs:fit:200:200:1/g:ce/aHR0cHM6Ly9vcGVu/Z3JhcGguZ2l0aHVi/YXNzZXRzLmNvbS81/MTE0ZTdkOGIwODM2/YmQ2MTY3NzQ1ZGI4/MmZjMGE3OGUyMjcw/MGFlY2ZjMWZkODBl/MDYzZTNiN2ZjOWNj/NzYyL2FzZGYtdm0v/YXNkZi9pc3N1ZXMv/MTc0MA", + "original": "https://opengraph.githubassets.com/5114e7d8b0836bd6167745db82fc0a78e22700aecfc1fd80e063e3b7fc9cc762/asdf-vm/asdf/issues/1740", + "logo": false + }, + "age": "1 day ago", + "extra_snippets": [ + "==> Starting build()... No preset version installed for command python Please install a version by running one of the following: asdf install python 3.8 or add one of the following versions in your config file at /home/ferret/.tool-versions python 3.11.0 python 3.12.1 python 3.12.3 ==> ERROR: A failure occurred in build(). Aborting...", + "-> error making: tlpui-exit status 4 -> Failed to install the following packages. Manual intervention is required: tlpui - exit status 4 ferret@FX505DT in ~ $ cat /home/ferret/.tool-versions nodejs 21.6.0 python 3.12.3 ferret@FX505DT in ~ $ python -V Python 3.12.3 ferret@FX505DT in ~ $ which python /home/ferret/.asdf/shims/python", + "Describe the Bug I am not sure why this is happening, I am trying to install tlpui from AUR and it fails, here are some logs to help: ==> Making package: tlpui 2:1.6.5-1 (Mi 10 apr 2024 23:19:15 +0300) ==> Retrieving sources... -> Found ..." + ] + }, + { + "title": "What are python.exe and python3.exe, and why do they appear to point to App Installer? | Windows 11 Forum", + "url": "https://www.elevenforum.com/t/what-are-python-exe-and-python3-exe-and-why-do-they-appear-to-point-to-app-installer.24886/", + "is_source_local": false, + "is_source_both": false, + "description": "I was looking at App execution aliases (Settings > Apps > Advanced app settings > App execution aliases) on my new computer -- my first Windows 11 computer. Why are python.exe and python3.exe listed as App Installer? 
I assume that App Installer refers to installation of Microsoft Store / UWP...", + "page_age": "2024-05-03T17:30:04", + "profile": { + "name": "Windows 11 Forum", + "url": "https://www.elevenforum.com/t/what-are-python-exe-and-python3-exe-and-why-do-they-appear-to-point-to-app-installer.24886/", + "long_name": "elevenforum.com", + "img": "https://imgs.search.brave.com/XVRAYMEj6Im8i7jV5RxeTwpiRPtY9IWg4wRIuh-WhEw/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvZjk5MDZkMDIw/M2U1OWIwNjM5Y2U1/M2U2NzNiNzVkNTA5/NzA5OTI1ZTFmOTc4/MzU3OTlhYzU5OTVi/ZGNjNTY4MS93d3cu/ZWxldmVuZm9ydW0u/Y29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "elevenforum.com", + "hostname": "www.elevenforum.com", + "favicon": "https://imgs.search.brave.com/XVRAYMEj6Im8i7jV5RxeTwpiRPtY9IWg4wRIuh-WhEw/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvZjk5MDZkMDIw/M2U1OWIwNjM5Y2U1/M2U2NzNiNzVkNTA5/NzA5OTI1ZTFmOTc4/MzU3OTlhYzU5OTVi/ZGNjNTY4MS93d3cu/ZWxldmVuZm9ydW0u/Y29tLw", + "path": " › windows support forums › apps and software" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/DVoFcE6d_-lx3BVGNS-RZK_lZzxQ8VhwZVf3AVqEJFA/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/ZWxldmVuZm9ydW0u/Y29tL2RhdGEvYXNz/ZXRzL2xvZ28vbWV0/YTEtMjAxLnBuZw", + "original": "https://www.elevenforum.com/data/assets/logo/meta1-201.png", + "logo": true + }, + "age": "2 days ago", + "extra_snippets": [ + "Why are python.exe and python3.exe listed as App Installer? I assume that App Installer refers to installation of Microsoft Store / UWP apps, but if that's the case, then why are they called python.exe and python3.exe? Or are python.exe and python3.exe simply serving as aliases / pointers pointing to App Installer, which is itself a Microsoft Store App?", + "Or are python.exe and python3.exe simply serving as aliases / pointers pointing to App Installer, which is itself a Microsoft Store App? I wish to soon install Python, along with an integrated development editor (IDE), on my machine, so that I can code in Python.", + "I wish to soon install Python, along with an integrated development editor (IDE), on my machine, so that I can code in Python. But is a Python interpreter already on my computer as suggested, if obliquely, by the presence of python.exe and python3.exe? I kind of doubt it." 
+ ] + }, + { + "title": "How to Watermark Your Images Using Python OpenCV in ...", + "url": "https://medium.com/@daily_data_prep/how-to-watermark-your-images-using-python-opencv-in-bulk-e472085389a1", + "is_source_local": false, + "is_source_both": false, + "description": "Medium is an open platform where readers find dynamic thinking, and where expert and undiscovered voices can share their writing on any topic.", + "page_age": "2024-05-03T14:05:06", + "profile": { + "name": "Medium", + "url": "https://medium.com/@daily_data_prep/how-to-watermark-your-images-using-python-opencv-in-bulk-e472085389a1", + "long_name": "medium.com", + "img": "https://imgs.search.brave.com/qvE2kIQCiAsnPv2C6P9xM5J2VVWdm55g-A-2Q_yIJ0g/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTZhYmQ1N2Q4/NDg4ZDcyODIyMDZi/MzFmOWNhNjE3Y2E4/Y2YzMThjNjljNDIx/ZjllZmNhYTcwODhl/YTcwNDEzYy9tZWRp/dW0uY29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "medium.com", + "hostname": "medium.com", + "favicon": "https://imgs.search.brave.com/qvE2kIQCiAsnPv2C6P9xM5J2VVWdm55g-A-2Q_yIJ0g/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvOTZhYmQ1N2Q4/NDg4ZDcyODIyMDZi/MzFmOWNhNjE3Y2E4/Y2YzMThjNjljNDIx/ZjllZmNhYTcwODhl/YTcwNDEzYy9tZWRp/dW0uY29tLw", + "path": "› @daily_data_prep › how-to-watermark-your-images-using-python-opencv-in-bulk-e472085389a1" + }, + "age": "2 days ago" + }, + { + "title": "Increment and Decrement Operators in Python?", + "url": "https://www.tutorialspoint.com/increment-and-decrement-operators-in-python", + "is_source_local": false, + "is_source_both": false, + "description": "Increment and Decrement Operators in Python - Python does not have unary increment/decrement operator (++/--). Instead to increment a value, usea += 1to decrement a value, use −a -= 1Example>>> a = 0 >>> >>> #Increment >>> a +=1 >>> >>> #Decrement >>> a -= 1 >>> >>> #value of a >>> a 0Python ...", + "page_age": "2023-08-23T00:00:00", + "profile": { + "name": "Tutorialspoint", + "url": "https://www.tutorialspoint.com/increment-and-decrement-operators-in-python", + "long_name": "tutorialspoint.com", + "img": "https://imgs.search.brave.com/Wt8BSkivPlFwcU5yBtf7YzuvTuRExyd_502cdABCS5c/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjcyYjAzYmVl/ODU4MzZiMjJiYTFh/MjJhZDNmNWE4YzA5/MDgyYTZhMDg3NTYw/M2NiY2NiZTUxN2I5/MjU1MWFmMS93d3cu/dHV0b3JpYWxzcG9p/bnQuY29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "tutorialspoint.com", + "hostname": "www.tutorialspoint.com", + "favicon": "https://imgs.search.brave.com/Wt8BSkivPlFwcU5yBtf7YzuvTuRExyd_502cdABCS5c/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYjcyYjAzYmVl/ODU4MzZiMjJiYTFh/MjJhZDNmNWE4YzA5/MDgyYTZhMDg3NTYw/M2NiY2NiZTUxN2I5/MjU1MWFmMS93d3cu/dHV0b3JpYWxzcG9p/bnQuY29tLw", + "path": "› increment-and-decrement-operators-in-python" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/ddG5vyZGLVudvecEbQJPeG8tGuaZ7g3Xz6Gyjdl5WA8/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/dHV0b3JpYWxzcG9p/bnQuY29tL2ltYWdl/cy90cF9sb2dvXzQz/Ni5wbmc", + "original": "https://www.tutorialspoint.com/images/tp_logo_436.png", + "logo": true + }, + "age": "August 23, 2023", + "extra_snippets": [ + "Increment and Decrement Operators in Python - Python does not have unary increment/decrement operator (++/--). 
Instead to increment a value, usea += 1to decrement a value, use −a -= 1Example>>> a = 0 >>> >>> #Increment >>> a +=1 >>> >>> #Decrement >>> a -= 1 >>> >>> #value of a >>> a 0Python does not provide multiple ways to do the same thing", + "So what above statement means in python is: create an object of type int having value 1 and give the name a to it. The object is an instance of int having value 1 and the name a refers to it. The assigned name a and the object to which it refers are distinct.", + "Python does not provide multiple ways to do the same thing .", + "However, be careful if you are coming from a language like C, Python doesn’t have \"variables\" in the sense that C does, instead python uses names and objects and in python integers (int’s) are immutable." + ] + }, + { + "title": "Gumroad – How not to suck at Python / SideFX Houdini | CG Persia", + "url": "https://cgpersia.com/2024/05/gumroad-how-not-to-suck-at-python-sidefx-houdini-195370.html", + "is_source_local": false, + "is_source_both": false, + "description": "Info: This course is made for artists or TD (technical director) willing to learn Python to improve their workflows inside SideFX Houdini, get faster in production and develop all the tools you always wished you had.", + "page_age": "2024-05-03T08:35:03", + "profile": { + "name": "Cgpersia", + "url": "https://cgpersia.com/2024/05/gumroad-how-not-to-suck-at-python-sidefx-houdini-195370.html", + "long_name": "cgpersia.com", + "img": "https://imgs.search.brave.com/VjyaopAm-M9sWvM7n-KnGZ3T5swIOwwE80iF5QVqQPg/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYmE0MzQ4NmI2/NjFhMTA1ZDBiN2Iw/ZWNiNDUxNjUwYjdh/MGE5ZjQ0ZjIxNzll/NmVkZDE2YzYyMDBh/NDNiMDgwMy9jZ3Bl/cnNpYS5jb20v" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "cgpersia.com", + "hostname": "cgpersia.com", + "favicon": "https://imgs.search.brave.com/VjyaopAm-M9sWvM7n-KnGZ3T5swIOwwE80iF5QVqQPg/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvYmE0MzQ4NmI2/NjFhMTA1ZDBiN2Iw/ZWNiNDUxNjUwYjdh/MGE5ZjQ0ZjIxNzll/NmVkZDE2YzYyMDBh/NDNiMDgwMy9jZ3Bl/cnNpYS5jb20v", + "path": "› 2024 › 05 › gumroad-how-not-to-suck-at-python-sidefx-houdini-195370.html" + }, + "age": "2 days ago", + "extra_snippets": [ + "Posted in: 2D, CG Releases, Downloads, Learning, Tutorials, Videos. Tagged: Gumroad, Python, Sidefx. Leave a Comment", + "01 – Python – Fundamentals Get the Fundamentals of python before starting the fun stuff ! 02 – Python Construction Part02 digging further into python concepts 03 – Houdini – Python Basics Applying some basic python in Houdini and starting to make tools !", + "02 – Python Construction Part02 digging further into python concepts 03 – Houdini – Python Basics Applying some basic python in Houdini and starting to make tools ! 04 – Houdini – Python Intermediate Applying some more advanced python in Houdini to make tools ! 05 – Houdini – Python Expert Using QtDesigner in combinaison with Houdini Python/Pyside to create advanced tools." + ] + }, + { + "title": "How to install Python: The complete Python programmer’s guide", + "url": "https://www.pluralsight.com/resources/blog/software-development/python-installation-guide", + "is_source_local": false, + "is_source_both": false, + "description": "An easy guide on how set up your operating system so you can program in Python, and how to update or uninstall it. 
For Linux, Windows, and macOS.", + "page_age": "2024-05-02T07:30:02", + "profile": { + "name": "Pluralsight", + "url": "https://www.pluralsight.com/resources/blog/software-development/python-installation-guide", + "long_name": "pluralsight.com", + "img": "https://imgs.search.brave.com/zvwQNSVu9-jR2CRlNcsTzxjaXKPlXNuh-Jo9-0yA1OE/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMTNkNWQyNjk3/M2Q0NzYyMmUyNDc3/ZjYwMWFlZDI5YTI4/ODhmYzc2MDkzMjAy/MjNkMWY1MDE3NTQw/MzI5NWVkZS93d3cu/cGx1cmFsc2lnaHQu/Y29tLw" + }, + "language": "en", + "family_friendly": true, + "type": "search_result", + "subtype": "generic", + "meta_url": { + "scheme": "https", + "netloc": "pluralsight.com", + "hostname": "www.pluralsight.com", + "favicon": "https://imgs.search.brave.com/zvwQNSVu9-jR2CRlNcsTzxjaXKPlXNuh-Jo9-0yA1OE/rs:fit:32:32:1/g:ce/aHR0cDovL2Zhdmlj/b25zLnNlYXJjaC5i/cmF2ZS5jb20vaWNv/bnMvMTNkNWQyNjk3/M2Q0NzYyMmUyNDc3/ZjYwMWFlZDI5YTI4/ODhmYzc2MDkzMjAy/MjNkMWY1MDE3NTQw/MzI5NWVkZS93d3cu/cGx1cmFsc2lnaHQu/Y29tLw", + "path": " › blog › blog" + }, + "thumbnail": { + "src": "https://imgs.search.brave.com/xrv5PHH2Bzmq2rcIYzk__8h5RqCj6kS3I6SGCNw5dZM/rs:fit:200:200:1/g:ce/aHR0cHM6Ly93d3cu/cGx1cmFsc2lnaHQu/Y29tL2NvbnRlbnQv/ZGFtL3BzL2ltYWdl/cy9yZXNvdXJjZS1j/ZW50ZXIvYmxvZy9o/ZWFkZXItaGVyby1p/bWFnZXMvUHl0aG9u/LndlYnA", + "original": "https://www.pluralsight.com/content/dam/ps/images/resource-center/blog/header-hero-images/Python.webp", + "logo": false + }, + "age": "3 days ago", + "extra_snippets": [ + "Whether it’s your first time programming or you’re a seasoned programmer, you’ll have to install or update Python every now and then --- or if necessary, uninstall it. In this article, you'll learn how to do just that.", + "Some systems come with Python, so to start off, we’ll first check to see if it’s installed on your system before we proceed. To do that, we’ll need to open a terminal. Since you might be new to programming, let’s go over how to open a terminal for Linux, Windows, and macOS.", + "Before we dive into setting up your system so you can program in Python, let’s talk terminal basics and benefits.", + "However, let’s focus on why we need it for working with Python. 
We use a terminal, or command line, to:" + ] + } + ], + "family_friendly": true + } +} diff --git a/backend/apps/rag/search/testdata/google_pse.json b/backend/apps/rag/search/testdata/google_pse.json new file mode 100644 index 0000000000000000000000000000000000000000..15da9729cde30eee86d0302f662c5ca37d71f65b --- /dev/null +++ b/backend/apps/rag/search/testdata/google_pse.json @@ -0,0 +1,442 @@ +{ + "kind": "customsearch#search", + "url": { + "type": "application/json", + "template": "https://www.googleapis.com/customsearch/v1?q={searchTerms}&num={count?}&start={startIndex?}&lr={language?}&safe={safe?}&cx={cx?}&sort={sort?}&filter={filter?}&gl={gl?}&cr={cr?}&googlehost={googleHost?}&c2coff={disableCnTwTranslation?}&hq={hq?}&hl={hl?}&siteSearch={siteSearch?}&siteSearchFilter={siteSearchFilter?}&exactTerms={exactTerms?}&excludeTerms={excludeTerms?}&linkSite={linkSite?}&orTerms={orTerms?}&dateRestrict={dateRestrict?}&lowRange={lowRange?}&highRange={highRange?}&searchType={searchType}&fileType={fileType?}&rights={rights?}&imgSize={imgSize?}&imgType={imgType?}&imgColorType={imgColorType?}&imgDominantColor={imgDominantColor?}&alt=json" + }, + "queries": { + "request": [ + { + "title": "Google Custom Search - lectures", + "totalResults": "2450000000", + "searchTerms": "lectures", + "count": 10, + "startIndex": 1, + "inputEncoding": "utf8", + "outputEncoding": "utf8", + "safe": "off", + "cx": "0473ef98502d44e18" + } + ], + "nextPage": [ + { + "title": "Google Custom Search - lectures", + "totalResults": "2450000000", + "searchTerms": "lectures", + "count": 10, + "startIndex": 11, + "inputEncoding": "utf8", + "outputEncoding": "utf8", + "safe": "off", + "cx": "0473ef98502d44e18" + } + ] + }, + "context": { + "title": "LLM Search" + }, + "searchInformation": { + "searchTime": 0.445959, + "formattedSearchTime": "0.45", + "totalResults": "2450000000", + "formattedTotalResults": "2,450,000,000" + }, + "items": [ + { + "kind": "customsearch#result", + "title": "The Feynman Lectures on Physics", + "htmlTitle": "The Feynman \u003cb\u003eLectures\u003c/b\u003e on Physics", + "link": "https://www.feynmanlectures.caltech.edu/", + "displayLink": "www.feynmanlectures.caltech.edu", + "snippet": "This edition has been designed for ease of reading on devices of any size or shape; text, figures and equations can all be zoomed without degradation.", + "htmlSnippet": "This edition has been designed for ease of reading on devices of any size or shape; text, figures and equations can all be zoomed without degradation.", + "cacheId": "CyXMWYWs9UEJ", + "formattedUrl": "https://www.feynmanlectures.caltech.edu/", + "htmlFormattedUrl": "https://www.feynman\u003cb\u003electures\u003c/b\u003e.caltech.edu/", + "pagemap": { + "metatags": [ + { + "viewport": "width=device-width, initial-scale=1.0" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Video Lectures", + "htmlTitle": "Video \u003cb\u003eLectures\u003c/b\u003e", + "link": "https://www.reddit.com/r/lectures/", + "displayLink": "www.reddit.com", + "snippet": "r/lectures: This subreddit is all about video lectures, talks and interesting public speeches. The topics include mathematics, physics, computer…", + "htmlSnippet": "r/\u003cb\u003electures\u003c/b\u003e: This subreddit is all about video \u003cb\u003electures\u003c/b\u003e, talks and interesting public speeches. 
The topics include mathematics, physics, computer…", + "formattedUrl": "https://www.reddit.com/r/lectures/", + "htmlFormattedUrl": "https://www.reddit.com/r/\u003cb\u003electures\u003c/b\u003e/", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTZtOjhfkgUKQbL3DZxe5F6OVsgeDNffleObjJ7n9RllKQTSsimax7VIaY&s", + "width": "192", + "height": "192" + } + ], + "metatags": [ + { + "og:image": "https://www.redditstatic.com/shreddit/assets/favicon/192x192.png", + "theme-color": "#000000", + "og:image:width": "256", + "og:type": "website", + "twitter:card": "summary", + "twitter:title": "r/lectures", + "og:site_name": "Reddit", + "og:title": "r/lectures", + "og:image:height": "256", + "bingbot": "noarchive", + "msapplication-navbutton-color": "#000000", + "og:description": "This subreddit is all about video lectures, talks and interesting public speeches.\n\nThe topics include mathematics, physics, computer science, programming, engineering, biology, medicine, economics, politics, social sciences, and any other subjects!", + "twitter:image": "https://www.redditstatic.com/shreddit/assets/favicon/192x192.png", + "apple-mobile-web-app-status-bar-style": "black", + "twitter:site": "@reddit", + "viewport": "width=device-width, initial-scale=1, viewport-fit=cover", + "apple-mobile-web-app-capable": "yes", + "og:ttl": "600", + "og:url": "https://www.reddit.com/r/lectures/" + } + ], + "cse_image": [ + { + "src": "https://www.redditstatic.com/shreddit/assets/favicon/192x192.png" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Lectures & Discussions | Flint Institute of Arts", + "htmlTitle": "\u003cb\u003eLectures\u003c/b\u003e & Discussions | Flint Institute of Arts", + "link": "https://flintarts.org/events/lectures", + "displayLink": "flintarts.org", + "snippet": "It will trace the intricate relationship between jewelry, attire, and the expression of personal identity, social hierarchy, and spiritual belief systems that ...", + "htmlSnippet": "It will trace the intricate relationship between jewelry, attire, and the expression of personal identity, social hierarchy, and spiritual belief systems that ...", + "cacheId": "jvpb9DxrfxoJ", + "formattedUrl": "https://flintarts.org/events/lectures", + "htmlFormattedUrl": "https://flintarts.org/events/\u003cb\u003electures\u003c/b\u003e", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcS23tMtAeNhJbOWdGxShYsmnyzFdzOC9Hb7lRykA9Pw72z1IlKTkjTdZw&s", + "width": "447", + "height": "113" + } + ], + "metatags": [ + { + "og:image": "https://flintarts.org/uploads/images/page-headers/_headerImage/nightshot.jpg", + "og:type": "website", + "viewport": "width=device-width, initial-scale=1", + "og:title": "Lectures & Discussions | Flint Institute of Arts", + "og:description": "The Flint Institute of Arts is the second largest art museum in Michigan and one of the largest museum art schools in the nation." + } + ], + "cse_image": [ + { + "src": "https://flintarts.org/uploads/images/page-headers/_headerImage/nightshot.jpg" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Mandel Lectures | Mandel Center for the Humanities ... - Waltham", + "htmlTitle": "Mandel \u003cb\u003eLectures\u003c/b\u003e | Mandel Center for the Humanities ... 
- Waltham", + "link": "https://www.brandeis.edu/mandel-center-humanities/mandel-lectures.html", + "displayLink": "www.brandeis.edu", + "snippet": "Past Lectures · Lecture 1: \"Invisible Music: The Sonic Idea of Black Revolution From Captivity to Reconstruction\" · Lecture 2: \"Solidarity in Sound: Grassroots ...", + "htmlSnippet": "Past \u003cb\u003eLectures\u003c/b\u003e · \u003cb\u003eLecture\u003c/b\u003e 1: "Invisible Music: The Sonic Idea of Black Revolution From Captivity to Reconstruction" · \u003cb\u003eLecture\u003c/b\u003e 2: "Solidarity in Sound: Grassroots ...", + "cacheId": "cQLOZr0kgEEJ", + "formattedUrl": "https://www.brandeis.edu/mandel-center-humanities/mandel-lectures.html", + "htmlFormattedUrl": "https://www.brandeis.edu/mandel-center-humanities/mandel-\u003cb\u003electures\u003c/b\u003e.html", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQWlU7bcJ5pIHk7RBCk2QKE-48ejF7hyPV0pr-20_cBt2BGdfKtiYXBuyw&s", + "width": "275", + "height": "183" + } + ], + "metatags": [ + { + "og:image": "https://www.brandeis.edu/mandel-center-humanities/events/events-images/mlhzumba", + "twitter:card": "summary_large_image", + "viewport": "width=device-width,initial-scale=1,minimum-scale=1", + "og:title": "Mandel Lectures in the Humanities", + "og:url": "https://www.brandeis.edu/mandel-center-humanities/mandel-lectures.html", + "og:description": "Annual Lecture Series", + "twitter:image": "https://www.brandeis.edu/mandel-center-humanities/events/events-images/mlhzumba" + } + ], + "cse_image": [ + { + "src": "https://www.brandeis.edu/mandel-center-humanities/events/events-images/mlhzumba" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Brian Douglas - YouTube", + "htmlTitle": "Brian Douglas - YouTube", + "link": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "displayLink": "www.youtube.com", + "snippet": "Welcome to Control Systems Lectures! This collection of videos is intended to supplement a first year controls class, not replace it.", + "htmlSnippet": "Welcome to Control Systems \u003cb\u003eLectures\u003c/b\u003e! 
This collection of videos is intended to supplement a first year controls class, not replace it.", + "cacheId": "NEROyBHolL0J", + "formattedUrl": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "htmlFormattedUrl": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "pagemap": { + "hcard": [ + { + "fn": "Brian Douglas", + "url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg" + } + ], + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcR7G0CeCBz_wVTZgjnhEr2QbiKP7f3uYzKitZYn74Mi32cDmVxvsegJoLI&s", + "width": "225", + "height": "225" + } + ], + "imageobject": [ + { + "width": "900", + "url": "https://yt3.googleusercontent.com/ytc/AIdro_nLo68wetImbwGUYP3stve_iKmAEccjhqB-q4o79xdInN4=s900-c-k-c0x00ffffff-no-rj", + "height": "900" + } + ], + "person": [ + { + "name": "Brian Douglas", + "url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg" + } + ], + "metatags": [ + { + "apple-itunes-app": "app-id=544007664, app-argument=https://m.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg?referring_app=com.apple.mobilesafari-smartbanner, affiliate-data=ct=smart_app_banner_polymer&pt=9008", + "og:image": "https://yt3.googleusercontent.com/ytc/AIdro_nLo68wetImbwGUYP3stve_iKmAEccjhqB-q4o79xdInN4=s900-c-k-c0x00ffffff-no-rj", + "twitter:app:url:iphone": "vnd.youtube://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "twitter:app:id:googleplay": "com.google.android.youtube", + "theme-color": "rgb(255, 255, 255)", + "og:image:width": "900", + "twitter:card": "summary", + "og:site_name": "YouTube", + "twitter:url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "twitter:app:url:ipad": "vnd.youtube://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "al:android:package": "com.google.android.youtube", + "twitter:app:name:googleplay": "YouTube", + "al:ios:url": "vnd.youtube://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "twitter:app:id:iphone": "544007664", + "og:description": "Welcome to Control Systems Lectures! This collection of videos is intended to supplement a first year controls class, not replace it. My goal is to take specific concepts in controls and expand on them in order to provide an intuitive understanding which will ultimately make you a better controls engineer. \n\nI'm glad you made it to my channel and I hope you find it useful.\n\nShoot me a message at controlsystemlectures@gmail.com, leave a comment or question and I'll get back to you if I can. 
Don't forget to subscribe!\n \nTwitter: @BrianBDouglas for engineering tweets and announcement of new videos.\nWebpage: http://engineeringmedia.com\n\nHere is the hardware/software I use: http://www.youtube.com/watch?v=m-M5_mIyHe4\n\nHere's a list of my favorite references: http://bit.ly/2skvmWd\n\n--Brian", + "al:ios:app_store_id": "544007664", + "twitter:image": "https://yt3.googleusercontent.com/ytc/AIdro_nLo68wetImbwGUYP3stve_iKmAEccjhqB-q4o79xdInN4=s900-c-k-c0x00ffffff-no-rj", + "twitter:site": "@youtube", + "og:type": "profile", + "twitter:title": "Brian Douglas", + "al:ios:app_name": "YouTube", + "og:title": "Brian Douglas", + "og:image:height": "900", + "twitter:app:id:ipad": "544007664", + "al:web:url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg?feature=applinks", + "al:android:url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg?feature=applinks", + "fb:app_id": "87741124305", + "twitter:app:url:googleplay": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "twitter:app:name:ipad": "YouTube", + "viewport": "width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no,", + "twitter:description": "Welcome to Control Systems Lectures! This collection of videos is intended to supplement a first year controls class, not replace it. My goal is to take specific concepts in controls and expand on them in order to provide an intuitive understanding which will ultimately make you a better controls engineer. \n\nI'm glad you made it to my channel and I hope you find it useful.\n\nShoot me a message at controlsystemlectures@gmail.com, leave a comment or question and I'll get back to you if I can. Don't forget to subscribe!\n \nTwitter: @BrianBDouglas for engineering tweets and announcement of new videos.\nWebpage: http://engineeringmedia.com\n\nHere is the hardware/software I use: http://www.youtube.com/watch?v=m-M5_mIyHe4\n\nHere's a list of my favorite references: http://bit.ly/2skvmWd\n\n--Brian", + "og:url": "https://www.youtube.com/channel/UCq0imsn84ShAe9PBOFnoIrg", + "al:android:app_name": "YouTube", + "twitter:app:name:iphone": "YouTube" + } + ], + "cse_image": [ + { + "src": "https://yt3.googleusercontent.com/ytc/AIdro_nLo68wetImbwGUYP3stve_iKmAEccjhqB-q4o79xdInN4=s900-c-k-c0x00ffffff-no-rj" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Lecture - Wikipedia", + "htmlTitle": "\u003cb\u003eLecture\u003c/b\u003e - Wikipedia", + "link": "https://en.wikipedia.org/wiki/Lecture", + "displayLink": "en.wikipedia.org", + "snippet": "Lecture ... For the academic rank, see Lecturer. A lecture (from Latin: lēctūra 'reading') is an oral presentation intended to present information or teach people ...", + "htmlSnippet": "\u003cb\u003eLecture\u003c/b\u003e ... For the academic rank, see \u003cb\u003eLecturer\u003c/b\u003e. 
A \u003cb\u003electure\u003c/b\u003e (from Latin: lēctūra 'reading') is an oral presentation intended to present information or teach people ...", + "cacheId": "d9Pjta02fmgJ", + "formattedUrl": "https://en.wikipedia.org/wiki/Lecture", + "htmlFormattedUrl": "https://en.wikipedia.org/wiki/Lecture", + "pagemap": { + "metatags": [ + { + "referrer": "origin", + "og:image": "https://upload.wikimedia.org/wikipedia/commons/thumb/2/26/ADFA_Lecture_Theatres.jpg/1200px-ADFA_Lecture_Theatres.jpg", + "theme-color": "#eaecf0", + "og:image:width": "1200", + "og:type": "website", + "viewport": "width=device-width, initial-scale=1.0, user-scalable=yes, minimum-scale=0.25, maximum-scale=5.0", + "og:title": "Lecture - Wikipedia", + "og:image:height": "799", + "format-detection": "telephone=no" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Mount Wilson Observatory | Lectures", + "htmlTitle": "Mount Wilson Observatory | \u003cb\u003eLectures\u003c/b\u003e", + "link": "https://www.mtwilson.edu/lectures/", + "displayLink": "www.mtwilson.edu", + "snippet": "Talks & Telescopes: August 24, 2024 – Panel: The Triumph of Hubble ... Compelling talks followed by picnicking and convivial stargazing through both the big ...", + "htmlSnippet": "Talks & Telescopes: August 24, 2024 – Panel: The Triumph of Hubble ... Compelling talks followed by picnicking and convivial stargazing through both the big ...", + "cacheId": "wdXI0azqx5UJ", + "formattedUrl": "https://www.mtwilson.edu/lectures/", + "htmlFormattedUrl": "https://www.mtwilson.edu/\u003cb\u003electures\u003c/b\u003e/", + "pagemap": { + "metatags": [ + { + "viewport": "width=device-width,initial-scale=1,user-scalable=no" + } + ], + "webpage": [ + { + "image": "http://www.mtwilson.edu/wp-content/uploads/2016/09/Logo.jpg", + "url": "https://www.facebook.com/WilsonObs" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Lectures | NBER", + "htmlTitle": "\u003cb\u003eLectures\u003c/b\u003e | NBER", + "link": "https://www.nber.org/research/lectures", + "displayLink": "www.nber.org", + "snippet": "Results 1 - 50 of 354 ... Among featured events at the NBER Summer Institute are the Martin Feldstein Lecture, which examines a current issue involving economic ...", + "htmlSnippet": "Results 1 - 50 of 354 \u003cb\u003e...\u003c/b\u003e Among featured events at the NBER Summer Institute are the Martin Feldstein \u003cb\u003eLecture\u003c/b\u003e, which examines a current issue involving economic ...", + "cacheId": "CvvP3U3nb44J", + "formattedUrl": "https://www.nber.org/research/lectures", + "htmlFormattedUrl": "https://www.nber.org/research/\u003cb\u003electures\u003c/b\u003e", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTmeViEZyV1YmFEFLhcA6WdgAG3v3RV6tB93ncyxSJ5JPst_p2aWrL7D1k&s", + "width": "310", + "height": "163" + } + ], + "metatags": [ + { + "og:image": "https://www.nber.org/sites/default/files/2022-06/NBER-FB-Share-Tile-1200.jpg", + "og:site_name": "NBER", + "handheldfriendly": "true", + "viewport": "width=device-width, initial-scale=1.0", + "og:title": "Lectures", + "mobileoptimized": "width", + "og:url": "https://www.nber.org/research/lectures" + } + ], + "cse_image": [ + { + "src": "https://www.nber.org/sites/default/files/2022-06/NBER-FB-Share-Tile-1200.jpg" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "STUDENTS CANNOT ACCESS RECORDED LECTURES ... - Solved", + "htmlTitle": "STUDENTS CANNOT ACCESS RECORDED LECTURES ... 
- Solved", + "link": "https://community.canvaslms.com/t5/Canvas-Question-Forum/STUDENTS-CANNOT-ACCESS-RECORDED-LECTURES/td-p/190358", + "displayLink": "community.canvaslms.com", + "snippet": "Mar 19, 2020 ... I believe the issue is that students were not invited. Are you trying to capture your screen? If not, there is an option to just record your web ...", + "htmlSnippet": "Mar 19, 2020 \u003cb\u003e...\u003c/b\u003e I believe the issue is that students were not invited. Are you trying to capture your screen? If not, there is an option to just record your web ...", + "cacheId": "wqrynQXX61sJ", + "formattedUrl": "https://community.canvaslms.com/t5/Canvas...LECTURES/td-p/190358", + "htmlFormattedUrl": "https://community.canvaslms.com/t5/Canvas...\u003cb\u003eLECTURES\u003c/b\u003e/td-p/190358", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcRUqXau3N8LfKgSD7OJOvV7xzGarLKRU-ckWXy1ZQ1p4CLPsedvLKmLMhk&s", + "width": "310", + "height": "163" + } + ], + "metatags": [ + { + "og:image": "https://community.canvaslms.com/html/@6A1FDD4D5FF35E4BBB4083A1022FA0DB/assets/CommunityPreview23.png", + "og:type": "article", + "article:section": "Canvas Question Forum", + "article:published_time": "2020-03-19T15:50:03.409Z", + "og:site_name": "Instructure Community", + "article:modified_time": "2020-03-19T13:55:53-07:00", + "viewport": "width=device-width, initial-scale=1.0, user-scalable=yes", + "og:title": "STUDENTS CANNOT ACCESS RECORDED LECTURES", + "og:url": "https://community.canvaslms.com/t5/Canvas-Question-Forum/STUDENTS-CANNOT-ACCESS-RECORDED-LECTURES/m-p/190358#M93667", + "og:description": "I can access and see my recorded lectures but my students can't. They have an error message when they try to open the recorded presentation or notes.", + "article:author": "https://community.canvaslms.com/t5/user/viewprofilepage/user-id/794287", + "twitter:image": "https://community.canvaslms.com/html/@6A1FDD4D5FF35E4BBB4083A1022FA0DB/assets/CommunityPreview23.png" + } + ], + "cse_image": [ + { + "src": "https://community.canvaslms.com/html/@6A1FDD4D5FF35E4BBB4083A1022FA0DB/assets/CommunityPreview23.png" + } + ] + } + }, + { + "kind": "customsearch#result", + "title": "Public Lecture Series - Sam Fox School of Design & Visual Arts", + "htmlTitle": "Public \u003cb\u003eLecture\u003c/b\u003e Series - Sam Fox School of Design & Visual Arts", + "link": "https://samfoxschool.wustl.edu/calendar/series/2-public-lecture-series", + "displayLink": "samfoxschool.wustl.edu", + "snippet": "The Sam Fox School's Spring 2024 Public Lecture Series highlights design and art as catalysts for change. Renowned speakers will delve into themes like ...", + "htmlSnippet": "The Sam Fox School's Spring 2024 Public \u003cb\u003eLecture\u003c/b\u003e Series highlights design and art as catalysts for change. Renowned speakers will delve into themes like ...", + "cacheId": "B-cgQG0j6tUJ", + "formattedUrl": "https://samfoxschool.wustl.edu/calendar/series/2-public-lecture-series", + "htmlFormattedUrl": "https://samfoxschool.wustl.edu/calendar/series/2-public-lecture-series", + "pagemap": { + "cse_thumbnail": [ + { + "src": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQSmHaGianm-64m-qauYjkPK_Q0JKWe-7yom4m1ogFYTmpWArA7k6dmk0sR&s", + "width": "307", + "height": "164" + } + ], + "website": [ + { + "name": "Public Lecture Series - Sam Fox School of Design & Visual Arts — Washington University in St. 
Louis" + } + ], + "metatags": [ + { + "og:image": "https://dvsp0hlm0xrn3.cloudfront.net/assets/default_og_image-44e73dee4b9d1e2c6a6295901371270c8ec5899eaed48ee8167a9b12f1b0f8b3.jpg", + "og:type": "website", + "og:site_name": "Sam Fox School of Design & Visual Arts — Washington University in St. Louis", + "viewport": "width=device-width, initial-scale=1.0", + "og:title": "Public Lecture Series - Sam Fox School of Design & Visual Arts — Washington University in St. Louis", + "csrf-token": "jBQsfZGY3RH8NVs0-KVDBYB-2N2kib4UYZHYdrShfTdLkvzfSvGeOaMrRKTRdYBPRKzdcGIuP7zwm9etqX_uvg", + "csrf-param": "authenticity_token", + "og:description": "The Sam Fox School's Spring 2024 Public Lecture Series highlights design and art as catalysts for change. Renowned speakers will delve into themes like social equity, resilient cities, and the impact of emerging technologies on contemporary life. Speakers include artists, architects, designers, and critics of the highest caliber, widely recognized for their research-based practices and multidisciplinary approaches to their fields." + } + ], + "cse_image": [ + { + "src": "https://dvsp0hlm0xrn3.cloudfront.net/assets/default_og_image-44e73dee4b9d1e2c6a6295901371270c8ec5899eaed48ee8167a9b12f1b0f8b3.jpg" + } + ] + } + } + ] +} diff --git a/backend/apps/rag/search/testdata/searxng.json b/backend/apps/rag/search/testdata/searxng.json new file mode 100644 index 0000000000000000000000000000000000000000..0e6952baa807842cf130bd0232eab6fe55f1ffba --- /dev/null +++ b/backend/apps/rag/search/testdata/searxng.json @@ -0,0 +1,476 @@ +{ + "query": "python", + "number_of_results": 116000000, + "results": [ + { + "url": "https://www.python.org/", + "title": "Welcome to Python.org", + "content": "Python is a versatile and powerful language that lets you work quickly and integrate systems more effectively. Learn how to get started, download the latest version, access documentation, find jobs, and join the Python community.", + "engine": "bing", + "parsed_url": ["https", "www.python.org", "/", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [1, 1, 1], + "score": 9.0, + "category": "general" + }, + { + "url": "https://wiki.nerdvpn.de/wiki/Python_(programming_language)", + "title": "Python (programming language) - Wikipedia", + "content": "Python is a high-level, general-purpose programming language. Its design philosophy emphasizes code readability with the use of significant indentation. Python is dynamically typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly procedural), object-oriented and functional programming.", + "engine": "bing", + "parsed_url": ["https", "wiki.nerdvpn.de", "/wiki/Python_(programming_language)", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [4, 3, 2], + "score": 3.25, + "category": "general" + }, + { + "url": "https://docs.python.org/3/tutorial/index.html", + "title": "The Python Tutorial \u2014 Python 3.12.3 documentation", + "content": "3 days ago \u00b7 Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. 
Python\u2019s elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many \u2026", + "engine": "bing", + "parsed_url": ["https", "docs.python.org", "/3/tutorial/index.html", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [5, 5, 3], + "score": 2.2, + "category": "general" + }, + { + "url": "https://www.python.org/downloads/", + "title": "Download Python | Python.org", + "content": "Python is a popular programming language for various purposes. Find the latest version of Python for different operating systems, download release notes, and learn about the development process.", + "engine": "bing", + "parsed_url": ["https", "www.python.org", "/downloads/", "", "", ""], + "template": "default.html", + "engines": ["bing", "duckduckgo"], + "positions": [2, 2], + "score": 2.0, + "category": "general" + }, + { + "url": "https://www.python.org/about/gettingstarted/", + "title": "Python For Beginners | Python.org", + "content": "Learn the basics of Python, a popular and easy-to-use programming language, from installing it to using it for various purposes. Find out how to access online documentation, tutorials, books, code samples, and more resources to help you get started with Python.", + "engine": "bing", + "parsed_url": ["https", "www.python.org", "/about/gettingstarted/", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [9, 4, 4], + "score": 1.8333333333333333, + "category": "general" + }, + { + "url": "https://www.python.org/shell/", + "title": "Welcome to Python.org", + "content": "Python is a versatile and easy-to-use programming language that lets you work quickly. Learn more about Python, download the latest version, access documentation, find jobs, and join the community.", + "engine": "bing", + "parsed_url": ["https", "www.python.org", "/shell/", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [3, 10, 8], + "score": 1.675, + "category": "general" + }, + { + "url": "https://realpython.com/", + "title": "Python Tutorials \u2013 Real Python", + "content": "Real Python offers comprehensive and up-to-date tutorials, books, and courses for Python developers of all skill levels. Whether you want to learn Python basics, web development, data science, machine learning, or more, you can find clear and practical guides and code examples here.", + "engine": "bing", + "parsed_url": ["https", "realpython.com", "/", "", "", ""], + "template": "default.html", + "engines": ["bing", "qwant", "duckduckgo"], + "positions": [6, 6, 5], + "score": 1.6, + "category": "general" + }, + { + "url": "https://wiki.nerdvpn.de/wiki/Python", + "title": "Python", + "content": "Topics referred to by the same term", + "engine": "wikipedia", + "parsed_url": ["https", "wiki.nerdvpn.de", "/wiki/Python", "", "", ""], + "template": "default.html", + "engines": ["wikipedia"], + "positions": [1], + "score": 1.0, + "category": "general" + }, + { + "title": "Online Python - IDE, Editor, Compiler, Interpreter", + "content": "Online Python IDE is a free online tool that lets you write, execute, and share Python code in the web browser. 
Learn about Python, its features, and its popularity as a general-purpose programming language for web development, data science, and more.", + "url": "https://www.online-python.com/", + "engine": "duckduckgo", + "parsed_url": ["https", "www.online-python.com", "/", "", "", ""], + "template": "default.html", + "engines": ["qwant", "duckduckgo"], + "positions": [8, 6], + "score": 0.5833333333333333, + "category": "general" + }, + { + "url": "https://micropython.org/", + "title": "MicroPython - Python for microcontrollers", + "content": "MicroPython is a full Python compiler and runtime that runs on the bare-metal. You get an interactive prompt (the REPL) to execute commands immediately, along ...", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "micropython.org", "/", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [1], + "score": 1.0, + "category": "general" + }, + { + "url": "https://dictionary.cambridge.org/uk/dictionary/english/python", + "title": "PYTHON | \u0417\u043d\u0430\u0447\u0435\u043d\u043d\u044f \u0432 \u0430\u043d\u0433\u043b\u0456\u0439\u0441\u044c\u043a\u0456\u0439 \u043c\u043e\u0432\u0456 - Cambridge Dictionary", + "content": "Apr 17, 2024 \u2014 \u0412\u0438\u0437\u043d\u0430\u0447\u0435\u043d\u043d\u044f PYTHON: 1. a very large snake that kills animals for food by wrapping itself around them and crushing them\u2026. \u0414\u0456\u0437\u043d\u0430\u0439\u0442\u0435\u0441\u044f \u0431\u0456\u043b\u044c\u0448\u0435.", + "img_src": null, + "engine": "google", + "parsed_url": [ + "https", + "dictionary.cambridge.org", + "/uk/dictionary/english/python", + "", + "", + "" + ], + "template": "default.html", + "engines": ["google"], + "positions": [2], + "score": 0.5, + "category": "general" + }, + { + "url": "https://www.codetoday.co.uk/code", + "title": "Web-based Python Editor (with Turtle graphics)", + "content": "Quick way of starting to write Python code, including drawing with Turtle, provided by CodeToday using Trinket.io Ideal for young children to start ...", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "www.codetoday.co.uk", "/code", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [3], + "score": 0.3333333333333333, + "category": "general" + }, + { + "url": "https://snapcraft.io/docs/python-plugin", + "title": "The python plugin | Snapcraft documentation", + "content": "The python plugin can be used by either Python 2 or Python 3 based parts using a setup.py script for building the project, or using a package published to ...", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "snapcraft.io", "/docs/python-plugin", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [4], + "score": 0.25, + "category": "general" + }, + { + "url": "https://www.developer-tech.com/categories/developer-languages/developer-languages-python/", + "title": "Latest Python Developer News", + "content": "Python's status as the primary language for AI and machine learning projects, from its extensive data-handling capabilities to its flexibility and ...", + "img_src": null, + "engine": "google", + "parsed_url": [ + "https", + "www.developer-tech.com", + "/categories/developer-languages/developer-languages-python/", + "", + "", + "" + ], + "template": "default.html", + "engines": ["google"], + "positions": [5], + "score": 0.2, + "category": "general" + }, + { + "url": "https://subjectguides.york.ac.uk/coding/python", + "title": "Coding: a 
Practical Guide - Python - Subject Guides", + "content": "Python is a coding language used for a wide range of things, including working with data, building systems and software, and even creating games.", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "subjectguides.york.ac.uk", "/coding/python", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [6], + "score": 0.16666666666666666, + "category": "general" + }, + { + "url": "https://hub.salford.ac.uk/psytech/python/getting-started-python/", + "title": "Getting Started - Python - Salford PsyTech Home - The Hub", + "content": "Python in itself is a very friendly programming language, when we get to grips with writing code, once you grasp the logic, it will become very intuitive.", + "img_src": null, + "engine": "google", + "parsed_url": [ + "https", + "hub.salford.ac.uk", + "/psytech/python/getting-started-python/", + "", + "", + "" + ], + "template": "default.html", + "engines": ["google"], + "positions": [7], + "score": 0.14285714285714285, + "category": "general" + }, + { + "url": "https://snapcraft.io/docs/python-apps", + "title": "Python apps | Snapcraft documentation", + "content": "Snapcraft can be used to package and distribute Python applications in a way that enables convenient installation by users. The process of creating a snap ...", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "snapcraft.io", "/docs/python-apps", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [8], + "score": 0.125, + "category": "general" + }, + { + "url": "https://anvil.works/", + "title": "Anvil | Build Web Apps with Nothing but Python", + "content": "Anvil is a free Python-based drag-and-drop web app builder.\u200eSign Up \u00b7 \u200eSign in \u00b7 \u200ePricing \u00b7 \u200eForum", + "img_src": null, + "engine": "google", + "parsed_url": ["https", "anvil.works", "/", "", "", ""], + "template": "default.html", + "engines": ["google"], + "positions": [9], + "score": 0.1111111111111111, + "category": "general" + }, + { + "url": "https://docs.python.org/", + "title": "Python 3.12.3 documentation", + "content": "3 days ago \u00b7 This is the official documentation for Python 3.12.3. Documentation sections: What's new in Python 3.12? Or all \"What's new\" documents since Python 2.0. Tutorial. Start here: a tour of Python's syntax and features. Library reference. Standard library and builtins. Language reference.", + "engine": "bing", + "parsed_url": ["https", "docs.python.org", "/", "", "", ""], + "template": "default.html", + "engines": ["bing", "duckduckgo"], + "positions": [7, 13], + "score": 0.43956043956043955, + "category": "general" + }, + { + "title": "How to Use Python: Your First Steps - Real Python", + "content": "Learn the basics of Python syntax, installation, error handling, and more in this tutorial. You'll also code your first Python program and test your knowledge with a quiz.", + "url": "https://realpython.com/python-first-steps/", + "engine": "duckduckgo", + "parsed_url": ["https", "realpython.com", "/python-first-steps/", "", "", ""], + "template": "default.html", + "engines": ["qwant", "duckduckgo"], + "positions": [14, 7], + "score": 0.42857142857142855, + "category": "general" + }, + { + "title": "The Python Tutorial \u2014 Python 3.11.8 documentation", + "content": "This tutorial introduces the reader informally to the basic concepts and features of the Python language and system. 
It helps to have a Python interpreter handy for hands-on experience, but all examples are self-contained, so the tutorial can be read off-line as well. For a description of standard objects and modules, see The Python Standard ...", + "url": "https://docs.python.org/3.11/tutorial/", + "engine": "duckduckgo", + "parsed_url": ["https", "docs.python.org", "/3.11/tutorial/", "", "", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [7], + "score": 0.14285714285714285, + "category": "general" + }, + { + "url": "https://realpython.com/python-introduction/", + "title": "Introduction to Python 3 \u2013 Real Python", + "content": "Python programming language, including a brief history of the development of Python and reasons why you might select Python as your language of choice.", + "engine": "bing", + "parsed_url": ["https", "realpython.com", "/python-introduction/", "", "", ""], + "template": "default.html", + "engines": ["bing"], + "positions": [8], + "score": 0.125, + "category": "general" + }, + { + "title": "Our Documentation | Python.org", + "content": "Find online or download Python's documentation, tutorials, and guides for beginners and advanced users. Learn how to port from Python 2 to Python 3, contribute to Python, and access Python videos and books.", + "url": "https://www.python.org/doc/", + "engine": "duckduckgo", + "parsed_url": ["https", "www.python.org", "/doc/", "", "", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [9], + "score": 0.1111111111111111, + "category": "general" + }, + { + "title": "Welcome to Python.org", + "url": "http://www.get-python.org/shell/", + "content": "The mission of the Python Software Foundation is to promote, protect, and advance the Python programming language, and to support and facilitate the growth of a diverse and international community of Python programmers. Learn more. Become a Member Donate to the PSF.", + "engine": "qwant", + "parsed_url": ["http", "www.get-python.org", "/shell/", "", "", ""], + "template": "default.html", + "engines": ["qwant"], + "positions": [9], + "score": 0.1111111111111111, + "category": "general" + }, + { + "title": "About Python\u2122 | Python.org", + "content": "Python is a powerful, fast, and versatile programming language that runs on various platforms and is easy to learn. Learn how to get started, explore the applications, and join the community of Python programmers and users.", + "url": "https://www.python.org/about/", + "engine": "duckduckgo", + "parsed_url": ["https", "www.python.org", "/about/", "", "", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [11], + "score": 0.09090909090909091, + "category": "general" + }, + { + "title": "Online Python Compiler (Interpreter) - Programiz", + "content": "Write and run Python code using this online tool. You can use Python Shell like IDLE, and take inputs from the user in our Python compiler.", + "url": "https://www.programiz.com/python-programming/online-compiler/", + "engine": "duckduckgo", + "parsed_url": [ + "https", + "www.programiz.com", + "/python-programming/online-compiler/", + "", + "", + "" + ], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [12], + "score": 0.08333333333333333, + "category": "general" + }, + { + "title": "Welcome to Python.org", + "content": "Python is a versatile and powerful language that lets you work quickly and integrate systems more effectively. 
Download the latest version, read the documentation, find jobs, events, success stories, and more on Python.org.", + "url": "https://www.python.org/?downloads", + "engine": "duckduckgo", + "parsed_url": ["https", "www.python.org", "/", "", "downloads", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [15], + "score": 0.06666666666666667, + "category": "general" + }, + { + "url": "https://www.matillion.com/blog/the-importance-of-python-and-its-growing-influence-on-data-productivty-a-matillion-perspective", + "title": "The Importance of Python and its Growing Influence on ...", + "content": "Jan 30, 2024 \u2014 The synergy of low-code functionality with Python's versatility empowers data professionals to orchestrate complex transformations seamlessly.", + "img_src": null, + "engine": "google", + "parsed_url": [ + "https", + "www.matillion.com", + "/blog/the-importance-of-python-and-its-growing-influence-on-data-productivty-a-matillion-perspective", + "", + "", + "" + ], + "template": "default.html", + "engines": ["google"], + "positions": [10], + "score": 0.1, + "category": "general" + }, + { + "title": "BeginnersGuide - Python Wiki", + "content": "This is the program that reads Python programs and carries out their instructions; you need it before you can do any Python programming. Mac and Linux distributions may include an outdated version of Python (Python 2), but you should install an updated one (Python 3). See BeginnersGuide/Download for instructions to download the correct version ...", + "url": "https://wiki.python.org/moin/BeginnersGuide", + "engine": "duckduckgo", + "parsed_url": ["https", "wiki.python.org", "/moin/BeginnersGuide", "", "", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [16], + "score": 0.0625, + "category": "general" + }, + { + "title": "Learn Python - Free Interactive Python Tutorial", + "content": "Learn Python from scratch or improve your skills with this website that offers tutorials, exercises, tests and certification. 
Explore topics such as basics, data science, advanced features and more with DataCamp.", + "url": "https://www.learnpython.org/", + "engine": "duckduckgo", + "parsed_url": ["https", "www.learnpython.org", "/", "", "", ""], + "template": "default.html", + "engines": ["duckduckgo"], + "positions": [17], + "score": 0.058823529411764705, + "category": "general" + } + ], + "answers": [], + "corrections": [], + "infoboxes": [ + { + "infobox": "Python", + "id": "https://en.wikipedia.org/wiki/Python_(programming_language)", + "content": "general-purpose programming language", + "img_src": "https://upload.wikimedia.org/wikipedia/commons/thumb/6/6f/.PY_file_recreation.png/500px-.PY_file_recreation.png", + "urls": [ + { + "title": "Official website", + "url": "https://www.python.org/", + "official": true + }, + { + "title": "Wikipedia (en)", + "url": "https://en.wikipedia.org/wiki/Python_(programming_language)" + }, + { + "title": "Wikidata", + "url": "http://www.wikidata.org/entity/Q28865" + } + ], + "attributes": [ + { + "label": "Inception", + "value": "Wednesday, February 20, 1991", + "entity": "P571" + }, + { + "label": "Developer", + "value": "Python Software Foundation, Guido van Rossum", + "entity": "P178" + }, + { + "label": "Copyright license", + "value": "Python Software Foundation License", + "entity": "P275" + }, + { + "label": "Programmed in", + "value": "C, Python", + "entity": "P277" + }, + { + "label": "Software version identifier", + "value": "3.12.3, 3.13.0a6", + "entity": "P348" + } + ], + "engine": "wikidata", + "engines": ["wikidata"] + } + ], + "suggestions": [ + "python turtle", + "micro python tutorial", + "python docs", + "python compiler", + "snapcraft python", + "micropython vs python", + "python online", + "python download" + ], + "unresponsive_engines": [] +} diff --git a/backend/apps/rag/search/testdata/serper.json b/backend/apps/rag/search/testdata/serper.json new file mode 100644 index 0000000000000000000000000000000000000000..b269eaf5b34fe64234ba6e7ffb27fd3fbbaa3fe0 --- /dev/null +++ b/backend/apps/rag/search/testdata/serper.json @@ -0,0 +1,190 @@ +{ + "searchParameters": { + "q": "apple inc", + "gl": "us", + "hl": "en", + "autocorrect": true, + "page": 1, + "type": "search" + }, + "knowledgeGraph": { + "title": "Apple", + "type": "Technology company", + "website": "http://www.apple.com/", + "imageUrl": "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQwGQRv5TjjkycpctY66mOg_e2-npacrmjAb6_jAWhzlzkFE3OTjxyzbA&s=0", + "description": "Apple Inc. 
is an American multinational technology company specializing in consumer electronics, software and online services headquartered in Cupertino, California, United States.", + "descriptionSource": "Wikipedia", + "descriptionLink": "https://en.wikipedia.org/wiki/Apple_Inc.", + "attributes": { + "Headquarters": "Cupertino, CA", + "CEO": "Tim Cook (Aug 24, 2011–)", + "Founded": "April 1, 1976, Los Altos, CA", + "Sales": "1 (800) 692-7753", + "Products": "iPhone, Apple Watch, iPad, and more", + "Founders": "Steve Jobs, Steve Wozniak, and Ronald Wayne", + "Subsidiaries": "Apple Store, Beats Electronics, Beddit, and more" + } + }, + "organic": [ + { + "title": "Apple", + "link": "https://www.apple.com/", + "snippet": "Discover the innovative world of Apple and shop everything iPhone, iPad, Apple Watch, Mac, and Apple TV, plus explore accessories, entertainment, ...", + "sitelinks": [ + { + "title": "Support", + "link": "https://support.apple.com/" + }, + { + "title": "iPhone", + "link": "https://www.apple.com/iphone/" + }, + { + "title": "Apple makes business better.", + "link": "https://www.apple.com/business/" + }, + { + "title": "Mac", + "link": "https://www.apple.com/mac/" + } + ], + "position": 1 + }, + { + "title": "Apple Inc. - Wikipedia", + "link": "https://en.wikipedia.org/wiki/Apple_Inc.", + "snippet": "Apple Inc. is an American multinational technology company specializing in consumer electronics, software and online services headquartered in Cupertino, ...", + "attributes": { + "Products": "AirPods; Apple Watch; iPad; iPhone; Mac", + "Founders": "Steve Jobs; Steve Wozniak; Ronald Wayne", + "Founded": "April 1, 1976; 46 years ago in Los Altos, California, U.S", + "Industry": "Consumer electronics; Software services; Online services" + }, + "sitelinks": [ + { + "title": "History", + "link": "https://en.wikipedia.org/wiki/History_of_Apple_Inc." + }, + { + "title": "Timeline of Apple Inc. products", + "link": "https://en.wikipedia.org/wiki/Timeline_of_Apple_Inc._products" + }, + { + "title": "List of software by Apple Inc.", + "link": "https://en.wikipedia.org/wiki/List_of_software_by_Apple_Inc." + }, + { + "title": "Apple Store", + "link": "https://en.wikipedia.org/wiki/Apple_Store" + } + ], + "position": 2 + }, + { + "title": "Apple Inc. | History, Products, Headquarters, & Facts | Britannica", + "link": "https://www.britannica.com/topic/Apple-Inc", + "snippet": "Apple Inc., formerly Apple Computer, Inc., American manufacturer of personal computers, smartphones, tablet computers, computer peripherals, ...", + "date": "Aug 31, 2022", + "attributes": { + "Related People": "Steve Jobs Steve Wozniak Jony Ive Tim Cook Angela Ahrendts", + "Date": "1976 - present", + "Areas Of Involvement": "peripheral device" + }, + "position": 3 + }, + { + "title": "AAPL: Apple Inc Stock Price Quote - NASDAQ GS - Bloomberg.com", + "link": "https://www.bloomberg.com/quote/AAPL:US", + "snippet": "Stock analysis for Apple Inc (AAPL:NASDAQ GS) including stock price, stock chart, company news, key statistics, fundamentals and company profile.", + "position": 4 + }, + { + "title": "Apple Inc. (AAPL) Company Profile & Facts - Yahoo Finance", + "link": "https://finance.yahoo.com/quote/AAPL/profile/", + "snippet": "Apple Inc. designs, manufactures, and markets smartphones, personal computers, tablets, wearables, and accessories worldwide. It also sells various related ...", + "position": 5 + }, + { + "title": "AAPL | Apple Inc. 
Stock Price & News - WSJ", + "link": "https://www.wsj.com/market-data/quotes/AAPL", + "snippet": "Apple, Inc. engages in the design, manufacture, and sale of smartphones, personal computers, tablets, wearables and accessories, and other varieties of ...", + "position": 6 + }, + { + "title": "Apple Inc Company Profile - Apple Inc Overview - GlobalData", + "link": "https://www.globaldata.com/company-profile/apple-inc/", + "snippet": "Apple Inc (Apple) designs, manufactures, and markets smartphones, tablets, personal computers (PCs), portable and wearable devices. The company also offers ...", + "position": 7 + }, + { + "title": "Apple Inc (AAPL) Stock Price & News - Google Finance", + "link": "https://www.google.com/finance/quote/AAPL:NASDAQ?hl=en", + "snippet": "Get the latest Apple Inc (AAPL) real-time quote, historical performance, charts, and other financial information to help you make more informed trading and ...", + "position": 8 + } + ], + "peopleAlsoAsk": [ + { + "question": "What does Apple Inc mean?", + "snippet": "Apple Inc., formerly Apple Computer, Inc., American manufacturer of personal\ncomputers, smartphones, tablet computers, computer peripherals, and computer\nsoftware. It was the first successful personal computer company and the\npopularizer of the graphical user interface.\nAug 31, 2022", + "title": "Apple Inc. | History, Products, Headquarters, & Facts | Britannica", + "link": "https://www.britannica.com/topic/Apple-Inc" + }, + { + "question": "Is Apple and Apple Inc same?", + "snippet": "Apple was founded as Apple Computer Company on April 1, 1976, by Steve Jobs,\nSteve Wozniak and Ronald Wayne to develop and sell Wozniak's Apple I personal\ncomputer. It was incorporated by Jobs and Wozniak as Apple Computer, Inc.", + "title": "Apple Inc. - Wikipedia", + "link": "https://en.wikipedia.org/wiki/Apple_Inc." + }, + { + "question": "Who owns Apple Inc?", + "snippet": "Apple Inc. is owned by two main institutional investors (Vanguard Group and\nBlackRock, Inc). While its major individual shareholders comprise people like\nArt Levinson, Tim Cook, Bruce Sewell, Al Gore, Johny Sroujli, and others.", + "title": "Who Owns Apple In 2022? 
- FourWeekMBA", + "link": "https://fourweekmba.com/who-owns-apple/" + }, + { + "question": "What products does Apple Inc offer?", + "snippet": "APPLE FOOTER\nStore.\nMac.\niPad.\niPhone.\nWatch.\nAirPods.\nTV & Home.\nAirTag.", + "title": "More items...", + "link": "https://www.apple.com/business/" + } + ], + "relatedSearches": [ + { + "query": "Who invented the iPhone" + }, + { + "query": "Apple Inc competitors" + }, + { + "query": "Apple iPad" + }, + { + "query": "iPhones" + }, + { + "query": "Apple Inc us" + }, + { + "query": "Apple company history" + }, + { + "query": "Apple Store" + }, + { + "query": "Apple customer service" + }, + { + "query": "Apple Watch" + }, + { + "query": "Apple Inc Industry" + }, + { + "query": "Apple Inc registered address" + }, + { + "query": "Apple Inc Bloomberg" + } + ] +} diff --git a/backend/apps/rag/search/testdata/serply.json b/backend/apps/rag/search/testdata/serply.json new file mode 100644 index 0000000000000000000000000000000000000000..0fc2a31e4d63cefba8aa96cad147208d596060c4 --- /dev/null +++ b/backend/apps/rag/search/testdata/serply.json @@ -0,0 +1,206 @@ +{ + "ads": [], + "ads_count": 0, + "answers": [], + "results": [ + { + "title": "Apple", + "link": "https://www.apple.com/", + "description": "Discover the innovative world of Apple and shop everything iPhone, iPad, Apple Watch, Mac, and Apple TV, plus explore accessories, entertainment, ...", + "additional_links": [ + { + "text": "AppleApplehttps://www.apple.com", + "href": "https://www.apple.com/" + } + ], + "cite": {}, + "subdomains": [ + { + "title": "Support", + "link": "https://support.apple.com/", + "description": "SupportContact - iPhone Support - Billing and Subscriptions - Apple Repair" + }, + { + "title": "Store", + "link": "https://www.apple.com/store", + "description": "StoreShop iPhone - Shop iPad - App Store - Shop Mac - ..." + }, + { + "title": "Mac", + "link": "https://www.apple.com/mac/", + "description": "MacMacBook Air - MacBook Pro - iMac - Compare Mac models - Mac mini" + }, + { + "title": "iPad", + "link": "https://www.apple.com/ipad/", + "description": "iPadShop iPad - iPad Pro - iPad Air - Compare iPad models - ..." + }, + { + "title": "Watch", + "link": "https://www.apple.com/watch/", + "description": "WatchShop Apple Watch - Series 9 - SE - Ultra 2 - Nike - Hermès - ..." + } + ], + "realPosition": 1 + }, + { + "title": "Apple", + "link": "https://www.apple.com/", + "description": "Discover the innovative world of Apple and shop everything iPhone, iPad, Apple Watch, Mac, and Apple TV, plus explore accessories, entertainment, ...", + "additional_links": [ + { + "text": "AppleApplehttps://www.apple.com", + "href": "https://www.apple.com/" + } + ], + "cite": {}, + "realPosition": 2 + }, + { + "title": "Apple Inc.", + "link": "https://en.wikipedia.org/wiki/Apple_Inc.", + "description": "Apple Inc. (formerly Apple Computer, Inc.) is an American multinational corporation and technology company headquartered in Cupertino, California, ...", + "additional_links": [ + { + "text": "Apple Inc.Wikipediahttps://en.wikipedia.org › wiki › Apple_Inc", + "href": "https://en.wikipedia.org/wiki/Apple_Inc." + }, + { + "text": "", + "href": "https://en.wikipedia.org/wiki/Apple_Inc." + }, + { + "text": "History", + "href": "https://en.wikipedia.org/wiki/History_of_Apple_Inc." 
+ }, + { + "text": "List of Apple products", + "href": "https://en.wikipedia.org/wiki/List_of_Apple_products" + }, + { + "text": "Litigation involving Apple Inc.", + "href": "https://en.wikipedia.org/wiki/Litigation_involving_Apple_Inc." + }, + { + "text": "Apple Park", + "href": "https://en.wikipedia.org/wiki/Apple_Park" + } + ], + "cite": { + "domain": "https://en.wikipedia.org › wiki › Apple_Inc", + "span": " › wiki › Apple_Inc" + }, + "realPosition": 3 + }, + { + "title": "Apple Inc. (AAPL) Company Profile & Facts", + "link": "https://finance.yahoo.com/quote/AAPL/profile/", + "description": "Apple Inc. designs, manufactures, and markets smartphones, personal computers, tablets, wearables, and accessories worldwide. The company offers iPhone, a line ...", + "additional_links": [ + { + "text": "Apple Inc. (AAPL) Company Profile & FactsYahoo Financehttps://finance.yahoo.com › quote › AAPL › profile", + "href": "https://finance.yahoo.com/quote/AAPL/profile/" + } + ], + "cite": { + "domain": "https://finance.yahoo.com › quote › AAPL › profile", + "span": " › quote › AAPL › profile" + }, + "realPosition": 4 + }, + { + "title": "Apple Inc - Company Profile and News", + "link": "https://www.bloomberg.com/profile/company/AAPL:US", + "description": "Apple Inc. Apple Inc. designs, manufactures, and markets smartphones, personal computers, tablets, wearables and accessories, and sells a variety of related ...", + "additional_links": [ + { + "text": "Apple Inc - Company Profile and NewsBloomberghttps://www.bloomberg.com › company › AAPL:US", + "href": "https://www.bloomberg.com/profile/company/AAPL:US" + }, + { + "text": "", + "href": "https://www.bloomberg.com/profile/company/AAPL:US" + } + ], + "cite": { + "domain": "https://www.bloomberg.com › company › AAPL:US", + "span": " › company › AAPL:US" + }, + "realPosition": 5 + }, + { + "title": "Apple Inc. | History, Products, Headquarters, & Facts", + "link": "https://www.britannica.com/money/Apple-Inc", + "description": "May 22, 2024 — Apple Inc. is an American multinational technology company that revolutionized the technology sector through its innovation of computer ...", + "additional_links": [ + { + "text": "Apple Inc. | History, Products, Headquarters, & FactsBritannicahttps://www.britannica.com › money › Apple-Inc", + "href": "https://www.britannica.com/money/Apple-Inc" + }, + { + "text": "", + "href": "https://www.britannica.com/money/Apple-Inc" + } + ], + "cite": { + "domain": "https://www.britannica.com › money › Apple-Inc", + "span": " › money › Apple-Inc" + }, + "realPosition": 6 + } + ], + "shopping_ads": [], + "places": [ + { + "title": "Apple Inc." 
+ }, + { + "title": "Apple Inc" + }, + { + "title": "Apple Inc" + } + ], + "related_searches": { + "images": [], + "text": [ + { + "title": "apple inc full form", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+Inc+full+form&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhPEAE" + }, + { + "title": "apple company history", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+company+history&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhOEAE" + }, + { + "title": "apple store", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+Store&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhQEAE" + }, + { + "title": "apple id", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+id&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhSEAE" + }, + { + "title": "apple inc industry", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+Inc+industry&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhREAE" + }, + { + "title": "apple login", + "link": "https://www.google.com/search?sca_esv=6b6df170a5c9891b&sca_upv=1&q=Apple+login&sa=X&ved=2ahUKEwjLxuSJwM-GAxUHODQIHYuJBhgQ1QJ6BAhTEAE" + } + ] + }, + "image_results": [], + "carousel": [], + "total": 2450000000, + "knowledge_graph": "", + "related_questions": [ + "What does the Apple Inc do?", + "Why did Apple change to Apple Inc?", + "Who owns Apple Inc.?", + "What is Apple Inc best known for?" + ], + "carousel_count": 0, + "ts": 2.491065263748169, + "device_type": null +} diff --git a/backend/apps/rag/search/testdata/serpstack.json b/backend/apps/rag/search/testdata/serpstack.json new file mode 100644 index 0000000000000000000000000000000000000000..a82f689d8b2293586d6b94974e018f74e49d1013 --- /dev/null +++ b/backend/apps/rag/search/testdata/serpstack.json @@ -0,0 +1,276 @@ +{ + "request": { + "success": true, + "total_time_taken": 3.4, + "processed_timestamp": 1714968442, + "search_url": "http://www.google.com/search?q=mcdonalds\u0026gl=us\u0026hl=en\u0026safe=0\u0026num=10" + }, + "search_parameters": { + "engine": "google", + "type": "web", + "device": "desktop", + "auto_location": "1", + "google_domain": "google.com", + "gl": "us", + "hl": "en", + "safe": "0", + "news_type": "all", + "exclude_autocorrected_results": "0", + "images_color": "any", + "page": "1", + "num": "10", + "output": "json", + "csv_fields": "search_parameters.query,organic_results.position,organic_results.title,organic_results.url,organic_results.domain", + "query": "mcdonalds", + "action": "search", + "access_key": "aac48e007e15c532bb94ffb34532a4b2", + "error": {} + }, + "search_information": { + "total_results": 1170000000, + "time_taken_displayed": 0.49, + "detected_location": {}, + "did_you_mean": {}, + "no_results_for_original_query": false, + "showing_results_for": {} + }, + "organic_results": [ + { + "position": 1, + "title": "Our Full McDonald\u0027s Food Menu", + "snippet": "", + "prerender": false, + "cached_page_url": {}, + "related_pages_url": {}, + "url": "https://www.mcdonalds.com/us/en-us/full-menu.html", + "domain": "www.mcdonalds.com", + "displayed_url": "https://www.mcdonalds.com \u203a en-us \u203a full-menu" + }, + { + "position": 2, + "title": "McDonald\u0027s", + "snippet": "McDonald\u0027s is the world\u0027s largest fast food restaurant chain, serving over 69 million customers daily in over 100 countries in more than 40,000 outlets as of\u00a0...", + "prerender": false, + "cached_page_url": {}, + 
"related_pages_url": {}, + "url": "https://en.wikipedia.org/wiki/McDonald%27s", + "domain": "en.wikipedia.org", + "displayed_url": "https://en.wikipedia.org \u203a wiki \u203a McDonald\u0027s" + }, + { + "position": 3, + "title": "Restaurants Near Me: Nearby McDonald\u0027s Locations", + "snippet": "", + "prerender": false, + "cached_page_url": {}, + "related_pages_url": {}, + "url": "https://www.mcdonalds.com/us/en-us/restaurant-locator.html", + "domain": "www.mcdonalds.com", + "displayed_url": "https://www.mcdonalds.com \u203a en-us \u203a restaurant-locator" + }, + { + "position": 4, + "title": "Download the McDonald\u0027s App: Deals, Promotions \u0026 ...", + "snippet": "Download the McDonald\u0027s app for Mobile Order \u0026 Pay, exclusive deals and coupons, menu information and special promotions.", + "prerender": false, + "cached_page_url": {}, + "related_pages_url": {}, + "url": "https://www.mcdonalds.com/us/en-us/download-app.html", + "domain": "www.mcdonalds.com", + "displayed_url": "https://www.mcdonalds.com \u203a en-us \u203a download-app" + }, + { + "position": 5, + "title": "McDonald\u0027s Restaurant Careers in the US", + "snippet": "McDonald\u0027s restaurant jobs are one-of-a-kind \u2013 just like you. Restaurants are hiring across all levels, from Crew team to Management. Apply today!", + "prerender": false, + "cached_page_url": {}, + "related_pages_url": {}, + "url": "https://jobs.mchire.com/", + "domain": "jobs.mchire.com", + "displayed_url": "https://jobs.mchire.com" + } + ], + "inline_images": [ + { + "image_url": "https://serpstack-assets.apilayer.net/2418910010831954152.png", + "title": "" + } + ], + "local_results": [ + { + "position": 1, + "title": "McDonald\u0027s", + "coordinates": { + "latitude": 0, + "longitude": 0 + }, + "address": "", + "rating": 0, + "reviews": 0, + "type": "", + "price": {}, + "url": 0 + }, + { + "position": 2, + "title": "McDonald\u0027s", + "coordinates": { + "latitude": 0, + "longitude": 0 + }, + "address": "", + "rating": 0, + "reviews": 0, + "type": "", + "price": {}, + "url": 0 + }, + { + "position": 3, + "title": "McDonald\u0027s", + "coordinates": { + "latitude": 0, + "longitude": 0 + }, + "address": "", + "rating": 0, + "reviews": 0, + "type": "", + "price": {}, + "url": 0 + } + ], + "top_stories": [ + { + "block_position": 1, + "title": "Menu nutrition", + "url": "/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=mcdonald%27s+double+quarter+pounder+with+cheese\u0026stick=H4sIAAAAAAAAAONgFuLUz9U3ME-vLDBX4tVP1zc0TCsuNE0ytjTTUs5OttJPy89P0c9NzSuNLyjKL8tMSS2yAvNS80qKMlOLF7Hq5ian5Ocl5qSoFyuk5Jcm5aQqFJYmFpWkFikU5JfmATUolGeWZCgkZ6SmFqcCAM4ilJtxAAAA\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4Qri56BAh0EAM", + "source": "", + "uploaded": "", + "uploaded_utc": "2024-05-06T04:07:22.082Z" + }, + { + "block_position": 2, + "title": "Profiles", + "url": "https://www.instagram.com/McDonalds", + "source": "", + "uploaded": "", + "uploaded_utc": "2024-05-06T04:07:22.082Z" + }, + { + "block_position": 3, + "title": "People also search for", + "url": 
"/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026si=ACC90nzx_D3_zUKRnpAjmO0UBLNxnt7EyN4YYdru6U3bxLI-L5Wg8IL2sxPFxxcDEhVbocy-LJPZIvZySijw0ho2hfZ-KtV-sSEEJ9lw7JuEkXHDnRK5y4Dm8aqbiLwugbLbslwjG3hO_gpDTFZK2VoUGZPy2nrmOBCy0G3PoOfoiEtct2GSZlUz0uufG-xP8emtNzQKQpvjkAm5Zmi57iVZueiD62upz7-x2N3dAbwtm6FkInAPRw1yR91zuT7F3lEaPblTW3LaRwCDC0bvaRCh9x4N9zHgY1OOQa_rzts2jf5WpXcuw4Y%3D\u0026q=Burger+King\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4Qs9oBKAB6BAhzEAI", + "source": "", + "uploaded": "", + "uploaded_utc": "2024-05-06T04:07:22.082Z" + } + ], + "related_questions": [ + { + "question": "What\u0027s a number 7 at McDonald\u0027s?What\u0027s a number 7 at McDonald\u0027s?What\u0027s a number 7 at McDonald\u0027s?", + "answer": "", + "title": "", + "displayed_url": "" + }, + { + "question": "Why is McDonald\u0027s changing their name?Why is McDonald\u0027s changing their name?Why is McDonald\u0027s changing their name?", + "answer": "", + "title": "", + "displayed_url": "" + }, + { + "question": "What is the oldest still running Mcdonalds?What is the oldest still running Mcdonalds?What is the oldest still running Mcdonalds?", + "answer": "", + "title": "", + "displayed_url": "" + }, + { + "question": "Why is McDonald\u0027s now WcDonald\u0027s?Why is McDonald\u0027s now WcDonald\u0027s?Why is McDonald\u0027s now WcDonald\u0027s?", + "answer": "", + "title": "", + "displayed_url": "" + } + ], + "knowledge_graph": { + "title": "", + "type": "Fast-food restaurant company", + "image_urls": ["https://serpstack-assets.apilayer.net/2418910010831954152.png"], + "description": "McDonald\u0027s Corporation is an American multinational fast food chain, founded in 1940 as a restaurant operated by Richard and Maurice McDonald, in San Bernardino, California, United States.", + "source": { + "name": "Wikipedia", + "url": "https://en.wikipedia.org/wiki/McDonald\u0027s" + }, + "people_also_search_for": [], + "known_attributes": [ + { + "attribute": "kc:/business/business_operation:founder", + "link": "http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Ray+Kroc\u0026si=ACC90nzx_D3_zUKRnpAjmO0UBLNxnt7EyN4YYdru6U3bxLI-LxARWRdbk5SkoY2sDn5Qq7yOmqYGei6qZ7sfJhsjZXBPgjMlLbS7824rpJOm69GzqVWMdoNIZiFX2T4A2td14sZOn4a1BexZLtZXHU7NZdF6VsWbGMVuiSYtXdev7uaUjEJKumiwlqTAATTebOriYTEBuSzC\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECHgQAg", + "name": "Founder: ", + "value": "Ray Kroc" + }, + { + "attribute": "kc:/organization/organization:ceo", + "link": "http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Chris+Kempczinski\u0026si=ACC90nwLLwns5sISZcdzuISy7t-NHozt8Cbt6G3WNQfC9ekAgKFbjdEFCDgxLbt57EDZGosYDGiZuq1AcBhA6IhTOSZxfVSySuGQ3VDwmmTA7Z93n3K3596jAuZH9VVv5h8PyvKJSuGuSsQWviJTl3eKj2UL1ZIWuDgkjyVMnC47rN7j0G9PlHRCCLdQF7VDQ1gubTiC4onXqLRBTbwAj6a--PD6Jv_NoA%3D%3D\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECHUQAg", + "name": "CEO: ", + "value": "Chris Kempczinski (Nov 1, 2019\u2013)" + }, + { + "attribute": "kc:/business/employer:revenue", + "link": "", + "name": "Revenue: ", + "value": "25.49\u00a0billion USD (2023)" + }, + { + "attribute": "kc:/organization/organization:founded", + "link": 
"http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Des+Plaines\u0026si=ACC90nyvvWro6QmnyY1IfSdgk5wwjB1r8BGd_IWRjXqmKPQqm_yqLtI_DBi5PXGOtg_Z3qrzzEP6mcih1nN7h5A7v6OefnEJiC7a8dBR-v9LxlRubfyR6vlMr3fZ3TmVKWwz9FRpvZb1eYNt-RM7KIDKQlwGEIgINvzhxjUrv6uxSmceduzxd8W7Pkz71XGwxF0F8OlSzHlx\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECG4QAg", + "name": "Founded: ", + "value": "April 15, 1955, Des Plaines, IL" + }, + { + "attribute": "kc:/organization/organization:headquarters", + "link": "http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Chicago\u0026si=ACC90nyvvWro6QmnyY1IfSdgk5wwjB1r8BGd_IWRjXqmKPQqm-46AEJ_kJbUIEvsvEEZqteiYJvXVXs2ScRNDvFFpjfeAaW3dxtpTGCgcsf5RMdi6IdzOdtjJMN3ZaFwqZOmdi7tC6r0Mh1O9bnP3HrVDB9hH02m7aA6f70dCAfTdpOFnGxDU6wVMAI5MxWBE3wTugtUDOK-\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECHYQAg", + "name": "Headquarters: ", + "value": "Chicago, IL" + }, + { + "attribute": "kc:/organization/organization:president", + "link": "http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Chris+Kempczinski\u0026si=ACC90nwLLwns5sISZcdzuISy7t-NHozt8Cbt6G3WNQfC9ekAgKFbjdEFCDgxLbt57EDZGosYDGiZuq1AcBhA6IhTOSZxfVSySuGQ3VDwmmTA7Z93n3K3596jAuZH9VVv5h8PyvKJSuGuSsQWviJTl3eKj2UL1ZIWuDgkjyVMnC47rN7j0G9PlHRCCLdQF7VDQ1gubTiC4onXqLRBTbwAj6a--PD6Jv_NoA%3D%3D\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECHEQAg", + "name": "President: ", + "value": "Chris Kempczinski" + } + ], + "website": "https://www.mcdonalds.com/us/en-us.html", + "profiles": [ + { + "name": "Instagram", + "url": "https://www.instagram.com/McDonalds" + }, + { + "name": "X (Twitter)", + "url": "https://twitter.com/McDonalds" + }, + { + "name": "Facebook", + "url": "https://www.facebook.com/McDonaldsUS" + }, + { + "name": "YouTube", + "url": "https://www.youtube.com/user/McDonaldsUS" + }, + { + "name": "Pinterest", + "url": "https://www.pinterest.com/mcdonalds" + } + ], + "founded": "April 15, 1955, Des Plaines, IL", + "headquarters": "Chicago, IL", + "founders": [ + { + "name": "Ray Kroc", + "link": "http://www.google.com/search?safe=0\u0026sca_esv=c9c7fd42856085e2\u0026sca_upv=1\u0026gl=us\u0026hl=en\u0026q=Ray+Kroc\u0026si=ACC90nzx_D3_zUKRnpAjmO0UBLNxnt7EyN4YYdru6U3bxLI-LxARWRdbk5SkoY2sDn5Qq7yOmqYGei6qZ7sfJhsjZXBPgjMlLbS7824rpJOm69GzqVWMdoNIZiFX2T4A2td14sZOn4a1BexZLtZXHU7NZdF6VsWbGMVuiSYtXdev7uaUjEJKumiwlqTAATTebOriYTEBuSzC\u0026sa=X\u0026ved=2ahUKEwjF55alk_iFAxXlamwGHbqgAs4QmxMoAHoECHgQAg" + } + ] + } +} diff --git a/backend/apps/rag/utils.py b/backend/apps/rag/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fde89b0697414a54a5fdabf4cdfb853c12e11c70 --- /dev/null +++ b/backend/apps/rag/utils.py @@ -0,0 +1,493 @@ +import os +import logging +import requests + +from typing import List, Union + +from apps.ollama.main import ( + generate_ollama_embeddings, + GenerateEmbeddingsForm, +) + +from huggingface_hub import snapshot_download + +from langchain_core.documents import Document +from langchain_community.retrievers import BM25Retriever +from langchain.retrievers import ( + ContextualCompressionRetriever, + EnsembleRetriever, +) + +from typing import Optional + +from utils.misc import get_last_user_message, add_or_update_system_message +from config import SRC_LOG_LEVELS, CHROMA_CLIENT + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +def query_doc( + collection_name: str, + query: 
str, + embedding_function, + k: int, +): + try: + collection = CHROMA_CLIENT.get_collection(name=collection_name) + query_embeddings = embedding_function(query) + + result = collection.query( + query_embeddings=[query_embeddings], + n_results=k, + ) + + log.info(f"query_doc:result {result}") + return result + except Exception as e: + raise e + + +def query_doc_with_hybrid_search( + collection_name: str, + query: str, + embedding_function, + k: int, + reranking_function, + r: float, +): + try: + collection = CHROMA_CLIENT.get_collection(name=collection_name) + documents = collection.get() # get all documents + + bm25_retriever = BM25Retriever.from_texts( + texts=documents.get("documents"), + metadatas=documents.get("metadatas"), + ) + bm25_retriever.k = k + + chroma_retriever = ChromaRetriever( + collection=collection, + embedding_function=embedding_function, + top_n=k, + ) + + ensemble_retriever = EnsembleRetriever( + retrievers=[bm25_retriever, chroma_retriever], weights=[0.5, 0.5] + ) + + compressor = RerankCompressor( + embedding_function=embedding_function, + top_n=k, + reranking_function=reranking_function, + r_score=r, + ) + + compression_retriever = ContextualCompressionRetriever( + base_compressor=compressor, base_retriever=ensemble_retriever + ) + + result = compression_retriever.invoke(query) + result = { + "distances": [[d.metadata.get("score") for d in result]], + "documents": [[d.page_content for d in result]], + "metadatas": [[d.metadata for d in result]], + } + + log.info(f"query_doc_with_hybrid_search:result {result}") + return result + except Exception as e: + raise e + + +def merge_and_sort_query_results(query_results, k, reverse=False): + # Initialize lists to store combined data + combined_distances = [] + combined_documents = [] + combined_metadatas = [] + + for data in query_results: + combined_distances.extend(data["distances"][0]) + combined_documents.extend(data["documents"][0]) + combined_metadatas.extend(data["metadatas"][0]) + + # Create a list of tuples (distance, document, metadata) + combined = list(zip(combined_distances, combined_documents, combined_metadatas)) + + # Sort the list based on distances + combined.sort(key=lambda x: x[0], reverse=reverse) + + # We don't have anything :-( + if not combined: + sorted_distances = [] + sorted_documents = [] + sorted_metadatas = [] + else: + # Unzip the sorted list + sorted_distances, sorted_documents, sorted_metadatas = zip(*combined) + + # Slicing the lists to include only k elements + sorted_distances = list(sorted_distances)[:k] + sorted_documents = list(sorted_documents)[:k] + sorted_metadatas = list(sorted_metadatas)[:k] + + # Create the output dictionary + result = { + "distances": [sorted_distances], + "documents": [sorted_documents], + "metadatas": [sorted_metadatas], + } + + return result + + +def query_collection( + collection_names: List[str], + query: str, + embedding_function, + k: int, +): + results = [] + for collection_name in collection_names: + try: + result = query_doc( + collection_name=collection_name, + query=query, + k=k, + embedding_function=embedding_function, + ) + results.append(result) + except: + pass + return merge_and_sort_query_results(results, k=k) + + +def query_collection_with_hybrid_search( + collection_names: List[str], + query: str, + embedding_function, + k: int, + reranking_function, + r: float, +): + results = [] + for collection_name in collection_names: + try: + result = query_doc_with_hybrid_search( + collection_name=collection_name, + query=query, + 
embedding_function=embedding_function, + k=k, + reranking_function=reranking_function, + r=r, + ) + results.append(result) + except: + pass + return merge_and_sort_query_results(results, k=k, reverse=True) + + +def rag_template(template: str, context: str, query: str): + template = template.replace("[context]", context) + template = template.replace("[query]", query) + return template + + +def get_embedding_function( + embedding_engine, + embedding_model, + embedding_function, + openai_key, + openai_url, + batch_size, +): + if embedding_engine == "": + return lambda query: embedding_function.encode(query).tolist() + elif embedding_engine in ["ollama", "openai"]: + if embedding_engine == "ollama": + func = lambda query: generate_ollama_embeddings( + GenerateEmbeddingsForm( + **{ + "model": embedding_model, + "prompt": query, + } + ) + ) + elif embedding_engine == "openai": + func = lambda query: generate_openai_embeddings( + model=embedding_model, + text=query, + key=openai_key, + url=openai_url, + ) + + def generate_multiple(query, f): + if isinstance(query, list): + if embedding_engine == "openai": + embeddings = [] + for i in range(0, len(query), batch_size): + embeddings.extend(f(query[i : i + batch_size])) + return embeddings + else: + return [f(q) for q in query] + else: + return f(query) + + return lambda query: generate_multiple(query, func) + + +def get_rag_context( + files, + messages, + embedding_function, + k, + reranking_function, + r, + hybrid_search, +): + log.debug(f"files: {files} {messages} {embedding_function} {reranking_function}") + query = get_last_user_message(messages) + + extracted_collections = [] + relevant_contexts = [] + + for file in files: + context = None + + collection_names = ( + file["collection_names"] + if file["type"] == "collection" + else [file["collection_name"]] + ) + + collection_names = set(collection_names).difference(extracted_collections) + if not collection_names: + log.debug(f"skipping {file} as it has already been extracted") + continue + + try: + if file["type"] == "text": + context = file["content"] + else: + if hybrid_search: + context = query_collection_with_hybrid_search( + collection_names=collection_names, + query=query, + embedding_function=embedding_function, + k=k, + reranking_function=reranking_function, + r=r, + ) + else: + context = query_collection( + collection_names=collection_names, + query=query, + embedding_function=embedding_function, + k=k, + ) + except Exception as e: + log.exception(e) + context = None + + if context: + relevant_contexts.append({**context, "source": file}) + + extracted_collections.extend(collection_names) + + contexts = [] + citations = [] + + for context in relevant_contexts: + try: + if "documents" in context: + contexts.append( + "\n\n".join( + [text for text in context["documents"][0] if text is not None] + ) + ) + + if "metadatas" in context: + citations.append( + { + "source": context["source"], + "document": context["documents"][0], + "metadata": context["metadatas"][0], + } + ) + except Exception as e: + log.exception(e) + + return contexts, citations + + +def get_model_path(model: str, update_model: bool = False): + # Construct huggingface_hub kwargs with local_files_only to return the snapshot path + cache_dir = os.getenv("SENTENCE_TRANSFORMERS_HOME") + + local_files_only = not update_model + + snapshot_kwargs = { + "cache_dir": cache_dir, + "local_files_only": local_files_only, + } + + log.debug(f"model: {model}") + log.debug(f"snapshot_kwargs: {snapshot_kwargs}") + + # Inspiration from 
upstream sentence_transformers +    if ( +        os.path.exists(model) +        or ("\\" in model or model.count("/") > 1) +        and local_files_only +    ): +        # If fully qualified path exists, return input, else set repo_id +        return model +    elif "/" not in model: +        # Set valid repo_id for model short-name +        model = "sentence-transformers" + "/" + model + +    snapshot_kwargs["repo_id"] = model + +    # Attempt to query the huggingface_hub library to determine the local path and/or to update +    try: +        model_repo_path = snapshot_download(**snapshot_kwargs) +        log.debug(f"model_repo_path: {model_repo_path}") +        return model_repo_path +    except Exception as e: +        log.exception(f"Cannot determine model snapshot path: {e}") +        return model + + +def generate_openai_embeddings( +    model: str, +    text: Union[str, list[str]], +    key: str, +    url: str = "https://api.openai.com/v1", +): +    if isinstance(text, list): +        embeddings = generate_openai_batch_embeddings(model, text, key, url) +    else: +        embeddings = generate_openai_batch_embeddings(model, [text], key, url) + +    return embeddings[0] if isinstance(text, str) else embeddings + + +def generate_openai_batch_embeddings( +    model: str, texts: list[str], key: str, url: str = "https://api.openai.com/v1" +) -> Optional[list[list[float]]]: +    try: +        r = requests.post( +            f"{url}/embeddings", +            headers={ +                "Content-Type": "application/json", +                "Authorization": f"Bearer {key}", +            }, +            json={"input": texts, "model": model}, +        ) +        r.raise_for_status() +        data = r.json() +        if "data" in data: +            return [elem["embedding"] for elem in data["data"]] +        else: +            raise Exception("Something went wrong :/") +    except Exception as e: +        log.exception(e) +        return None + + +from typing import Any + +from langchain_core.retrievers import BaseRetriever +from langchain_core.callbacks import CallbackManagerForRetrieverRun + + +class ChromaRetriever(BaseRetriever): +    collection: Any +    embedding_function: Any +    top_n: int + +    def _get_relevant_documents( +        self, +        query: str, +        *, +        run_manager: CallbackManagerForRetrieverRun, +    ) -> List[Document]: +        query_embeddings = self.embedding_function(query) + +        results = self.collection.query( +            query_embeddings=[query_embeddings], +            n_results=self.top_n, +        ) + +        ids = results["ids"][0] +        metadatas = results["metadatas"][0] +        documents = results["documents"][0] + +        results = [] +        for idx in range(len(ids)): +            results.append( +                Document( +                    metadata=metadatas[idx], +                    page_content=documents[idx], +                ) +            ) +        return results + + +import operator + +from typing import Optional, Sequence + +from langchain_core.documents import BaseDocumentCompressor, Document +from langchain_core.callbacks import Callbacks +from langchain_core.pydantic_v1 import Extra + + +class RerankCompressor(BaseDocumentCompressor): +    embedding_function: Any +    top_n: int +    reranking_function: Any +    r_score: float + +    class Config: +        extra = Extra.forbid +        arbitrary_types_allowed = True + +    def compress_documents( +        self, +        documents: Sequence[Document], +        query: str, +        callbacks: Optional[Callbacks] = None, +    ) -> Sequence[Document]: +        reranking = self.reranking_function is not None + +        if reranking: +            scores = self.reranking_function.predict( +                [(query, doc.page_content) for doc in documents] +            ) +        else: +            from sentence_transformers import util + +            query_embedding = self.embedding_function(query) +            document_embedding = self.embedding_function( +                [doc.page_content for doc in documents] +            ) +            scores = util.cos_sim(query_embedding, document_embedding)[0] + +        docs_with_scores = list(zip(documents, scores.tolist())) +        if self.r_score: +
docs_with_scores = [ + (d, s) for d, s in docs_with_scores if s >= self.r_score + ] + + result = sorted(docs_with_scores, key=operator.itemgetter(1), reverse=True) + final_results = [] + for doc, doc_score in result[: self.top_n]: + metadata = doc.metadata + metadata["score"] = doc_score + doc = Document( + page_content=doc.page_content, + metadata=metadata, + ) + final_results.append(doc) + return final_results diff --git a/backend/apps/socket/main.py b/backend/apps/socket/main.py new file mode 100644 index 0000000000000000000000000000000000000000..18ce7a6072f64d1783534f5d5103d05f62cc6dcb --- /dev/null +++ b/backend/apps/socket/main.py @@ -0,0 +1,170 @@ +import socketio +import asyncio + + +from apps.webui.models.users import Users +from utils.utils import decode_token + +sio = socketio.AsyncServer(cors_allowed_origins=[], async_mode="asgi") +app = socketio.ASGIApp(sio, socketio_path="/ws/socket.io") + +# Dictionary to maintain the user pool + +SESSION_POOL = {} +USER_POOL = {} +USAGE_POOL = {} +# Timeout duration in seconds +TIMEOUT_DURATION = 3 + + +@sio.event +async def connect(sid, environ, auth): + user = None + if auth and "token" in auth: + data = decode_token(auth["token"]) + + if data is not None and "id" in data: + user = Users.get_user_by_id(data["id"]) + + if user: + SESSION_POOL[sid] = user.id + if user.id in USER_POOL: + USER_POOL[user.id].append(sid) + else: + USER_POOL[user.id] = [sid] + + print(f"user {user.name}({user.id}) connected with session ID {sid}") + + await sio.emit("user-count", {"count": len(set(USER_POOL))}) + await sio.emit("usage", {"models": get_models_in_use()}) + + +@sio.on("user-join") +async def user_join(sid, data): + print("user-join", sid, data) + + auth = data["auth"] if "auth" in data else None + + if auth and "token" in auth: + data = decode_token(auth["token"]) + + if data is not None and "id" in data: + user = Users.get_user_by_id(data["id"]) + + if user: + + SESSION_POOL[sid] = user.id + if user.id in USER_POOL: + USER_POOL[user.id].append(sid) + else: + USER_POOL[user.id] = [sid] + + print(f"user {user.name}({user.id}) connected with session ID {sid}") + + await sio.emit("user-count", {"count": len(set(USER_POOL))}) + + +@sio.on("user-count") +async def user_count(sid): + await sio.emit("user-count", {"count": len(set(USER_POOL))}) + + +def get_models_in_use(): + # Aggregate all models in use + models_in_use = [] + for model_id, data in USAGE_POOL.items(): + models_in_use.append(model_id) + + return models_in_use + + +@sio.on("usage") +async def usage(sid, data): + + model_id = data["model"] + + # Cancel previous callback if there is one + if model_id in USAGE_POOL: + USAGE_POOL[model_id]["callback"].cancel() + + # Store the new usage data and task + + if model_id in USAGE_POOL: + USAGE_POOL[model_id]["sids"].append(sid) + USAGE_POOL[model_id]["sids"] = list(set(USAGE_POOL[model_id]["sids"])) + + else: + USAGE_POOL[model_id] = {"sids": [sid]} + + # Schedule a task to remove the usage data after TIMEOUT_DURATION + USAGE_POOL[model_id]["callback"] = asyncio.create_task( + remove_after_timeout(sid, model_id) + ) + + # Broadcast the usage data to all clients + await sio.emit("usage", {"models": get_models_in_use()}) + + +async def remove_after_timeout(sid, model_id): + try: + await asyncio.sleep(TIMEOUT_DURATION) + if model_id in USAGE_POOL: + print(USAGE_POOL[model_id]["sids"]) + USAGE_POOL[model_id]["sids"].remove(sid) + USAGE_POOL[model_id]["sids"] = list(set(USAGE_POOL[model_id]["sids"])) + + if len(USAGE_POOL[model_id]["sids"]) == 0: + del 
USAGE_POOL[model_id] + + # Broadcast the usage data to all clients + await sio.emit("usage", {"models": get_models_in_use()}) + except asyncio.CancelledError: + # Task was cancelled due to new 'usage' event + pass + + +@sio.event +async def disconnect(sid): + if sid in SESSION_POOL: + user_id = SESSION_POOL[sid] + del SESSION_POOL[sid] + + USER_POOL[user_id].remove(sid) + + if len(USER_POOL[user_id]) == 0: + del USER_POOL[user_id] + + await sio.emit("user-count", {"count": len(USER_POOL)}) + else: + print(f"Unknown session ID {sid} disconnected") + + +async def get_event_emitter(request_info): + async def __event_emitter__(event_data): + await sio.emit( + "chat-events", + { + "chat_id": request_info["chat_id"], + "message_id": request_info["message_id"], + "data": event_data, + }, + to=request_info["session_id"], + ) + + return __event_emitter__ + + +async def get_event_call(request_info): + async def __event_call__(event_data): + response = await sio.call( + "chat-events", + { + "chat_id": request_info["chat_id"], + "message_id": request_info["message_id"], + "data": event_data, + }, + to=request_info["session_id"], + ) + return response + + return __event_call__ diff --git a/backend/apps/webui/internal/db.py b/backend/apps/webui/internal/db.py new file mode 100644 index 0000000000000000000000000000000000000000..fbe287e185f3a7011f2c73ef894781cf2512a783 --- /dev/null +++ b/backend/apps/webui/internal/db.py @@ -0,0 +1,110 @@ +import os +import logging +import json +from contextlib import contextmanager + +from peewee_migrate import Router +from apps.webui.internal.wrappers import register_connection + +from typing import Optional, Any +from typing_extensions import Self + +from sqlalchemy import create_engine, types, Dialect +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, scoped_session +from sqlalchemy.sql.type_api import _T + +from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL, BACKEND_DIR + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["DB"]) + + +class JSONField(types.TypeDecorator): + impl = types.Text + cache_ok = True + + def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any: + return json.dumps(value) + + def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any: + if value is not None: + return json.loads(value) + + def copy(self, **kw: Any) -> Self: + return JSONField(self.impl.length) + + def db_value(self, value): + return json.dumps(value) + + def python_value(self, value): + if value is not None: + return json.loads(value) + + +# Check if the file exists +if os.path.exists(f"{DATA_DIR}/ollama.db"): + # Rename the file + os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db") + log.info("Database migrated from Ollama-WebUI successfully.") +else: + pass + + +# Workaround to handle the peewee migration +# This is required to ensure the peewee migration is handled before the alembic migration +def handle_peewee_migration(DATABASE_URL): + db = None  # ensure 'db' is defined for the finally block even if the connection fails + try: + # Replace the postgresql:// with postgres:// and %40 with @ in the DATABASE_URL + db = register_connection( + DATABASE_URL.replace("postgresql://", "postgres://").replace("%40", "@") + ) + migrate_dir = BACKEND_DIR / "apps" / "webui" / "internal" / "migrations" + router = Router(db, logger=log, migrate_dir=migrate_dir) + router.run() + db.close() + + # check if db connection has been closed + + except Exception as e: + log.error(f"Failed to initialize the database connection: {e}") + raise + + finally: + # Properly closing
the database connection + if db and not db.is_closed(): + db.close() + + # Assert if db connection has been closed + assert db.is_closed(), "Database connection is still open." + + +handle_peewee_migration(DATABASE_URL) + + +SQLALCHEMY_DATABASE_URL = DATABASE_URL +if "sqlite" in SQLALCHEMY_DATABASE_URL: + engine = create_engine( + SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} + ) +else: + engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True) + + +SessionLocal = sessionmaker( + autocommit=False, autoflush=False, bind=engine, expire_on_commit=False +) +Base = declarative_base() +Session = scoped_session(SessionLocal) + + +# Dependency +def get_session(): + db = SessionLocal() + try: + yield db + finally: + db.close() + + +get_db = contextmanager(get_session) diff --git a/backend/apps/webui/internal/migrations/001_initial_schema.py b/backend/apps/webui/internal/migrations/001_initial_schema.py new file mode 100644 index 0000000000000000000000000000000000000000..93f278f15b842306c6d7e3367c696272c5e9da69 --- /dev/null +++ b/backend/apps/webui/internal/migrations/001_initial_schema.py @@ -0,0 +1,254 @@ +"""Peewee migrations -- 001_initial_schema.py. + +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # We perform different migrations for SQLite and other databases + # This is because SQLite is very loose with enforcing its schema, and trying to migrate other databases like SQLite + # will require per-database SQL queries. + # Instead, we assume that because external DB support was added at a later date, it is safe to assume a newer base + # schema instead of trying to migrate from an older schema. 
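+ # For example: handle_peewee_migration() in apps/webui/internal/db.py wraps the active connection + # in a peewee_migrate Router and calls router.run(), which invokes this migrate() with that + # connection, so a pw.SqliteDatabase takes the migrate_sqlite() branch below while an external + # engine (e.g. Postgres registered via playhouse.db_url in wrappers.py) takes migrate_external().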
+ if isinstance(database, pw.SqliteDatabase): + migrate_sqlite(migrator, database, fake=fake) + else: + migrate_external(migrator, database, fake=fake) + + +def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False): + @migrator.create_model + class Auth(pw.Model): + id = pw.CharField(max_length=255, unique=True) + email = pw.CharField(max_length=255) + password = pw.CharField(max_length=255) + active = pw.BooleanField() + + class Meta: + table_name = "auth" + + @migrator.create_model + class Chat(pw.Model): + id = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + title = pw.CharField() + chat = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "chat" + + @migrator.create_model + class ChatIdTag(pw.Model): + id = pw.CharField(max_length=255, unique=True) + tag_name = pw.CharField(max_length=255) + chat_id = pw.CharField(max_length=255) + user_id = pw.CharField(max_length=255) + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "chatidtag" + + @migrator.create_model + class Document(pw.Model): + id = pw.AutoField() + collection_name = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255, unique=True) + title = pw.CharField() + filename = pw.CharField() + content = pw.TextField(null=True) + user_id = pw.CharField(max_length=255) + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "document" + + @migrator.create_model + class Modelfile(pw.Model): + id = pw.AutoField() + tag_name = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + modelfile = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "modelfile" + + @migrator.create_model + class Prompt(pw.Model): + id = pw.AutoField() + command = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + title = pw.CharField() + content = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "prompt" + + @migrator.create_model + class Tag(pw.Model): + id = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255) + user_id = pw.CharField(max_length=255) + data = pw.TextField(null=True) + + class Meta: + table_name = "tag" + + @migrator.create_model + class User(pw.Model): + id = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255) + email = pw.CharField(max_length=255) + role = pw.CharField(max_length=255) + profile_image_url = pw.CharField(max_length=255) + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "user" + + +def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False): + @migrator.create_model + class Auth(pw.Model): + id = pw.CharField(max_length=255, unique=True) + email = pw.CharField(max_length=255) + password = pw.TextField() + active = pw.BooleanField() + + class Meta: + table_name = "auth" + + @migrator.create_model + class Chat(pw.Model): + id = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + title = pw.TextField() + chat = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "chat" + + @migrator.create_model + class ChatIdTag(pw.Model): + id = pw.CharField(max_length=255, unique=True) + tag_name = pw.CharField(max_length=255) + chat_id = pw.CharField(max_length=255) + user_id = pw.CharField(max_length=255) + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "chatidtag" + + @migrator.create_model + class Document(pw.Model): 
+ id = pw.AutoField() + collection_name = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255, unique=True) + title = pw.TextField() + filename = pw.TextField() + content = pw.TextField(null=True) + user_id = pw.CharField(max_length=255) + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "document" + + @migrator.create_model + class Modelfile(pw.Model): + id = pw.AutoField() + tag_name = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + modelfile = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "modelfile" + + @migrator.create_model + class Prompt(pw.Model): + id = pw.AutoField() + command = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + title = pw.TextField() + content = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "prompt" + + @migrator.create_model + class Tag(pw.Model): + id = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255) + user_id = pw.CharField(max_length=255) + data = pw.TextField(null=True) + + class Meta: + table_name = "tag" + + @migrator.create_model + class User(pw.Model): + id = pw.CharField(max_length=255, unique=True) + name = pw.CharField(max_length=255) + email = pw.CharField(max_length=255) + role = pw.CharField(max_length=255) + profile_image_url = pw.TextField() + timestamp = pw.BigIntegerField() + + class Meta: + table_name = "user" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("user") + + migrator.remove_model("tag") + + migrator.remove_model("prompt") + + migrator.remove_model("modelfile") + + migrator.remove_model("document") + + migrator.remove_model("chatidtag") + + migrator.remove_model("chat") + + migrator.remove_model("auth") diff --git a/backend/apps/webui/internal/migrations/002_add_local_sharing.py b/backend/apps/webui/internal/migrations/002_add_local_sharing.py new file mode 100644 index 0000000000000000000000000000000000000000..e93501aeec522fc102a3ce26112b2edd0e518455 --- /dev/null +++ b/backend/apps/webui/internal/migrations/002_add_local_sharing.py @@ -0,0 +1,48 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields( + "chat", share_id=pw.CharField(max_length=255, null=True, unique=True) + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("chat", "share_id") diff --git a/backend/apps/webui/internal/migrations/003_add_auth_api_key.py b/backend/apps/webui/internal/migrations/003_add_auth_api_key.py new file mode 100644 index 0000000000000000000000000000000000000000..07144f3aca6688a960f7036bcd2c20470da0881c --- /dev/null +++ b/backend/apps/webui/internal/migrations/003_add_auth_api_key.py @@ -0,0 +1,48 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields( + "user", api_key=pw.CharField(max_length=255, null=True, unique=True) + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("user", "api_key") diff --git a/backend/apps/webui/internal/migrations/004_add_archived.py b/backend/apps/webui/internal/migrations/004_add_archived.py new file mode 100644 index 0000000000000000000000000000000000000000..d01c06b4e665a61c709a8c662387e0c0755efa9a --- /dev/null +++ b/backend/apps/webui/internal/migrations/004_add_archived.py @@ -0,0 +1,46 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields("chat", archived=pw.BooleanField(default=False)) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("chat", "archived") diff --git a/backend/apps/webui/internal/migrations/005_add_updated_at.py b/backend/apps/webui/internal/migrations/005_add_updated_at.py new file mode 100644 index 0000000000000000000000000000000000000000..950866ef024e80fa1b1af6e296d89feb50a5f5f9 --- /dev/null +++ b/backend/apps/webui/internal/migrations/005_add_updated_at.py @@ -0,0 +1,130 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + if isinstance(database, pw.SqliteDatabase): + migrate_sqlite(migrator, database, fake=fake) + else: + migrate_external(migrator, database, fake=fake) + + +def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False): + # Adding fields created_at and updated_at to the 'chat' table + migrator.add_fields( + "chat", + created_at=pw.DateTimeField(null=True), # Allow null for transition + updated_at=pw.DateTimeField(null=True), # Allow null for transition + ) + + # Populate the new fields from an existing 'timestamp' field + migrator.sql( + "UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL" + ) + + # Now that the data has been copied, remove the original 'timestamp' field + migrator.remove_fields("chat", "timestamp") + + # Update the fields to be not null now that they are populated + migrator.change_fields( + "chat", + created_at=pw.DateTimeField(null=False), + updated_at=pw.DateTimeField(null=False), + ) + + +def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False): + # Adding fields created_at and updated_at to the 'chat' table + migrator.add_fields( + "chat", + created_at=pw.BigIntegerField(null=True), # Allow null for transition + updated_at=pw.BigIntegerField(null=True), # Allow null for transition + ) + + # Populate the new fields from an existing 'timestamp' field + migrator.sql( + "UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL" + ) + + # Now that the data has been copied, remove the original 'timestamp' field + migrator.remove_fields("chat", "timestamp") + + # Update the fields to be not null now that they are populated + migrator.change_fields( + "chat", + created_at=pw.BigIntegerField(null=False), + updated_at=pw.BigIntegerField(null=False), + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + if isinstance(database, pw.SqliteDatabase): + rollback_sqlite(migrator, database, fake=fake) + else: + rollback_external(migrator, database, fake=fake) + + +def rollback_sqlite(migrator: Migrator, database: 
pw.Database, *, fake=False): + # Recreate the timestamp field initially allowing null values for safe transition + migrator.add_fields("chat", timestamp=pw.DateTimeField(null=True)) + + # Copy the earliest created_at date back into the new timestamp field + # This assumes created_at was originally a copy of timestamp + migrator.sql("UPDATE chat SET timestamp = created_at") + + # Remove the created_at and updated_at fields + migrator.remove_fields("chat", "created_at", "updated_at") + + # Finally, alter the timestamp field to not allow nulls if that was the original setting + migrator.change_fields("chat", timestamp=pw.DateTimeField(null=False)) + + +def rollback_external(migrator: Migrator, database: pw.Database, *, fake=False): + # Recreate the timestamp field initially allowing null values for safe transition + migrator.add_fields("chat", timestamp=pw.BigIntegerField(null=True)) + + # Copy the earliest created_at date back into the new timestamp field + # This assumes created_at was originally a copy of timestamp + migrator.sql("UPDATE chat SET timestamp = created_at") + + # Remove the created_at and updated_at fields + migrator.remove_fields("chat", "created_at", "updated_at") + + # Finally, alter the timestamp field to not allow nulls if that was the original setting + migrator.change_fields("chat", timestamp=pw.BigIntegerField(null=False)) diff --git a/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py b/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py new file mode 100644 index 0000000000000000000000000000000000000000..caca14d323e1fad148e7e14bae207c7e1b8896a9 --- /dev/null +++ b/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py @@ -0,0 +1,130 @@ +"""Peewee migrations -- 006_migrate_timestamps_and_charfields.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # Alter the tables with timestamps + migrator.change_fields( + "chatidtag", + timestamp=pw.BigIntegerField(), + ) + migrator.change_fields( + "document", + timestamp=pw.BigIntegerField(), + ) + migrator.change_fields( + "modelfile", + timestamp=pw.BigIntegerField(), + ) + migrator.change_fields( + "prompt", + timestamp=pw.BigIntegerField(), + ) + migrator.change_fields( + "user", + timestamp=pw.BigIntegerField(), + ) + # Alter the tables with varchar to text where necessary + migrator.change_fields( + "auth", + password=pw.TextField(), + ) + migrator.change_fields( + "chat", + title=pw.TextField(), + ) + migrator.change_fields( + "document", + title=pw.TextField(), + filename=pw.TextField(), + ) + migrator.change_fields( + "prompt", + title=pw.TextField(), + ) + migrator.change_fields( + "user", + profile_image_url=pw.TextField(), + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + if isinstance(database, pw.SqliteDatabase): + # Alter the tables with timestamps + migrator.change_fields( + "chatidtag", + timestamp=pw.DateField(), + ) + migrator.change_fields( + "document", + timestamp=pw.DateField(), + ) + migrator.change_fields( + "modelfile", + timestamp=pw.DateField(), + ) + migrator.change_fields( + "prompt", + timestamp=pw.DateField(), + ) + migrator.change_fields( + "user", + timestamp=pw.DateField(), + ) + migrator.change_fields( + "auth", + password=pw.CharField(max_length=255), + ) + migrator.change_fields( + "chat", + title=pw.CharField(), + ) + migrator.change_fields( + "document", + title=pw.CharField(), + filename=pw.CharField(), + ) + migrator.change_fields( + "prompt", + title=pw.CharField(), + ) + migrator.change_fields( + "user", + profile_image_url=pw.CharField(), + ) diff --git a/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py b/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py new file mode 100644 index 0000000000000000000000000000000000000000..dd176ba73e51b15f74f45b839d0eb8e72fc63ecf --- /dev/null +++ b/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py 
@@ -0,0 +1,79 @@ +"""Peewee migrations -- 002_add_local_sharing.py. + +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # Adding fields created_at and updated_at to the 'user' table + migrator.add_fields( + "user", + created_at=pw.BigIntegerField(null=True), # Allow null for transition + updated_at=pw.BigIntegerField(null=True), # Allow null for transition + last_active_at=pw.BigIntegerField(null=True), # Allow null for transition + ) + + # Populate the new fields from an existing 'timestamp' field + migrator.sql( + 'UPDATE "user" SET created_at = timestamp, updated_at = timestamp, last_active_at = timestamp WHERE timestamp IS NOT NULL' + ) + + # Now that the data has been copied, remove the original 'timestamp' field + migrator.remove_fields("user", "timestamp") + + # Update the fields to be not null now that they are populated + migrator.change_fields( + "user", + created_at=pw.BigIntegerField(null=False), + updated_at=pw.BigIntegerField(null=False), + last_active_at=pw.BigIntegerField(null=False), + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + # Recreate the timestamp field initially allowing null values for safe transition + migrator.add_fields("user", timestamp=pw.BigIntegerField(null=True)) + + # Copy the earliest created_at date back into the new timestamp field + # This assumes created_at was originally a copy of timestamp + migrator.sql('UPDATE "user" SET timestamp = created_at') + + # Remove the created_at and updated_at fields + migrator.remove_fields("user", "created_at", "updated_at", "last_active_at") + + # Finally, alter the timestamp field to not allow nulls if that was the original setting + migrator.change_fields("user", timestamp=pw.BigIntegerField(null=False)) diff --git a/backend/apps/webui/internal/migrations/008_add_memory.py b/backend/apps/webui/internal/migrations/008_add_memory.py new file mode 100644 index 0000000000000000000000000000000000000000..9307aa4d5c933cf0ee98c0932ab0e48bc4cecbc6 --- /dev/null +++ b/backend/apps/webui/internal/migrations/008_add_memory.py @@ -0,0 +1,53 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + @migrator.create_model + class Memory(pw.Model): + id = pw.CharField(max_length=255, unique=True) + user_id = pw.CharField(max_length=255) + content = pw.TextField(null=False) + updated_at = pw.BigIntegerField(null=False) + created_at = pw.BigIntegerField(null=False) + + class Meta: + table_name = "memory" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("memory") diff --git a/backend/apps/webui/internal/migrations/009_add_models.py b/backend/apps/webui/internal/migrations/009_add_models.py new file mode 100644 index 0000000000000000000000000000000000000000..548ec7cdcabbc620f8c2c79b65709a6b08d9a11c --- /dev/null +++ b/backend/apps/webui/internal/migrations/009_add_models.py @@ -0,0 +1,61 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + @migrator.create_model + class Model(pw.Model): + id = pw.TextField(unique=True) + user_id = pw.TextField() + base_model_id = pw.TextField(null=True) + + name = pw.TextField() + + meta = pw.TextField() + params = pw.TextField() + + created_at = pw.BigIntegerField(null=False) + updated_at = pw.BigIntegerField(null=False) + + class Meta: + table_name = "model" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("model") diff --git a/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py b/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py new file mode 100644 index 0000000000000000000000000000000000000000..2ef814c06b046d4a5281d2486a41222ca7ac87ad --- /dev/null +++ b/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py @@ -0,0 +1,130 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator +import json + +from utils.misc import parse_ollama_modelfile + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # Fetch data from 'modelfile' table and insert into 'model' table + migrate_modelfile_to_model(migrator, database) + # Drop the 'modelfile' table + migrator.remove_model("modelfile") + + +def migrate_modelfile_to_model(migrator: Migrator, database: pw.Database): + ModelFile = migrator.orm["modelfile"] + Model = migrator.orm["model"] + + modelfiles = ModelFile.select() + + for modelfile in modelfiles: + # Extract and transform data in Python + + modelfile.modelfile = json.loads(modelfile.modelfile) + meta = json.dumps( + { + "description": modelfile.modelfile.get("desc"), + "profile_image_url": modelfile.modelfile.get("imageUrl"), + "ollama": {"modelfile": modelfile.modelfile.get("content")}, + "suggestion_prompts": modelfile.modelfile.get("suggestionPrompts"), + "categories": modelfile.modelfile.get("categories"), + "user": {**modelfile.modelfile.get("user", {}), "community": True}, + } + ) + + info = parse_ollama_modelfile(modelfile.modelfile.get("content")) + + # Insert the processed data into the 'model' table + Model.create( + id=f"ollama-{modelfile.tag_name}", + user_id=modelfile.user_id, + base_model_id=info.get("base_model_id"), + name=modelfile.modelfile.get("title"), + meta=meta, + params=json.dumps(info.get("params", {})), + created_at=modelfile.timestamp, + updated_at=modelfile.timestamp, + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + recreate_modelfile_table(migrator, database) + move_data_back_to_modelfile(migrator, database) + migrator.remove_model("model") + + +def recreate_modelfile_table(migrator: Migrator, database: pw.Database): + query = """ + CREATE TABLE IF NOT EXISTS modelfile ( + user_id TEXT, + tag_name TEXT, + modelfile JSON, + timestamp BIGINT + ) + """ + migrator.sql(query) + + +def move_data_back_to_modelfile(migrator: Migrator, database: pw.Database): + Model = migrator.orm["model"] + Modelfile = migrator.orm["modelfile"] + + models = Model.select() + + for model in models: + # Extract and transform data in Python + meta = 
json.loads(model.meta) + + modelfile_data = { + "title": model.name, + "desc": meta.get("description"), + "imageUrl": meta.get("profile_image_url"), + "content": meta.get("ollama", {}).get("modelfile"), + "suggestionPrompts": meta.get("suggestion_prompts"), + "categories": meta.get("categories"), + "user": {k: v for k, v in meta.get("user", {}).items() if k != "community"}, + } + + # Insert the processed data back into the 'modelfile' table + Modelfile.create( + user_id=model.user_id, + tag_name=model.id, + modelfile=modelfile_data, + timestamp=model.created_at, + ) diff --git a/backend/apps/webui/internal/migrations/011_add_user_settings.py b/backend/apps/webui/internal/migrations/011_add_user_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..a1620dcadae41891922e324a9a6b152b81ea0ec4 --- /dev/null +++ b/backend/apps/webui/internal/migrations/011_add_user_settings.py @@ -0,0 +1,48 @@ +"""Peewee migrations -- 002_add_local_sharing.py. + +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # Adding fields settings to the 'user' table + migrator.add_fields("user", settings=pw.TextField(null=True)) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + # Remove the settings field + migrator.remove_fields("user", "settings") diff --git a/backend/apps/webui/internal/migrations/012_add_tools.py b/backend/apps/webui/internal/migrations/012_add_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..4a68eea552e4fb9b5d5f64f55e7b7966f342435b --- /dev/null +++ b/backend/apps/webui/internal/migrations/012_add_tools.py @@ -0,0 +1,61 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + @migrator.create_model + class Tool(pw.Model): + id = pw.TextField(unique=True) + user_id = pw.TextField() + + name = pw.TextField() + content = pw.TextField() + specs = pw.TextField() + + meta = pw.TextField() + + created_at = pw.BigIntegerField(null=False) + updated_at = pw.BigIntegerField(null=False) + + class Meta: + table_name = "tool" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("tool") diff --git a/backend/apps/webui/internal/migrations/013_add_user_info.py b/backend/apps/webui/internal/migrations/013_add_user_info.py new file mode 100644 index 0000000000000000000000000000000000000000..0f68669cca869fcd5537d1a0600ed45155056437 --- /dev/null +++ b/backend/apps/webui/internal/migrations/013_add_user_info.py @@ -0,0 +1,48 @@ +"""Peewee migrations -- 002_add_local_sharing.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + # Adding fields info to the 'user' table + migrator.add_fields("user", info=pw.TextField(null=True)) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + # Remove the settings field + migrator.remove_fields("user", "info") diff --git a/backend/apps/webui/internal/migrations/014_add_files.py b/backend/apps/webui/internal/migrations/014_add_files.py new file mode 100644 index 0000000000000000000000000000000000000000..5e1acf0ad8b9510b8d090c6458f292a124dd73cd --- /dev/null +++ b/backend/apps/webui/internal/migrations/014_add_files.py @@ -0,0 +1,55 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + @migrator.create_model + class File(pw.Model): + id = pw.TextField(unique=True) + user_id = pw.TextField() + filename = pw.TextField() + meta = pw.TextField() + created_at = pw.BigIntegerField(null=False) + + class Meta: + table_name = "file" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("file") diff --git a/backend/apps/webui/internal/migrations/015_add_functions.py b/backend/apps/webui/internal/migrations/015_add_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..8316a9333bad45eccf7d6708016ef5c45b208360 --- /dev/null +++ b/backend/apps/webui/internal/migrations/015_add_functions.py @@ -0,0 +1,61 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + @migrator.create_model + class Function(pw.Model): + id = pw.TextField(unique=True) + user_id = pw.TextField() + + name = pw.TextField() + type = pw.TextField() + + content = pw.TextField() + meta = pw.TextField() + + created_at = pw.BigIntegerField(null=False) + updated_at = pw.BigIntegerField(null=False) + + class Meta: + table_name = "function" + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_model("function") diff --git a/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py b/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py new file mode 100644 index 0000000000000000000000000000000000000000..e3af521b7e841ee01b226ad867ae509e42e5eb4f --- /dev/null +++ b/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py @@ -0,0 +1,50 @@ +"""Peewee migrations -- 009_add_models.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields("tool", valves=pw.TextField(null=True)) + migrator.add_fields("function", valves=pw.TextField(null=True)) + migrator.add_fields("function", is_active=pw.BooleanField(default=False)) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("tool", "valves") + migrator.remove_fields("function", "valves") + migrator.remove_fields("function", "is_active") diff --git a/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py b/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py new file mode 100644 index 0000000000000000000000000000000000000000..eaa3fa5fe54bd37691593ba6fc2840b8653d534d --- /dev/null +++ b/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py @@ -0,0 +1,45 @@ +"""Peewee migrations -- 017_add_user_oauth_sub.py. 
+Some examples (model - class or model name):: + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields( + "user", + oauth_sub=pw.TextField(null=True, unique=True), + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("user", "oauth_sub") diff --git a/backend/apps/webui/internal/migrations/018_add_function_is_global.py b/backend/apps/webui/internal/migrations/018_add_function_is_global.py new file mode 100644 index 0000000000000000000000000000000000000000..04cdab705986a36227a7f73e800d6739f00033ce --- /dev/null +++ b/backend/apps/webui/internal/migrations/018_add_function_is_global.py @@ -0,0 +1,49 @@ +"""Peewee migrations -- 017_add_user_oauth_sub.py. 
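+Adds an 'is_global' flag (default False) to the function table.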
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['table_name'] # Return model in current state by name + > Model = migrator.ModelClass # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.run(func, *args, **kwargs) # Run python function with the given args + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.add_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + > migrator.add_constraint(model, name, sql) + > migrator.drop_index(model, *col_names) + > migrator.drop_not_null(model, *field_names) + > migrator.drop_constraints(model, *constraints) + +""" + +from contextlib import suppress + +import peewee as pw +from peewee_migrate import Migrator + + +with suppress(ImportError): + import playhouse.postgres_ext as pw_pext + + +def migrate(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your migrations here.""" + + migrator.add_fields( + "function", + is_global=pw.BooleanField(default=False), + ) + + +def rollback(migrator: Migrator, database: pw.Database, *, fake=False): + """Write your rollback migrations here.""" + + migrator.remove_fields("function", "is_global") diff --git a/backend/apps/webui/internal/wrappers.py b/backend/apps/webui/internal/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..2b5551ce2ba833fa57480b19a84984f93673260e --- /dev/null +++ b/backend/apps/webui/internal/wrappers.py @@ -0,0 +1,72 @@ +from contextvars import ContextVar +from peewee import * +from peewee import PostgresqlDatabase, InterfaceError as PeeWeeInterfaceError + +import logging +from playhouse.db_url import connect, parse +from playhouse.shortcuts import ReconnectMixin + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["DB"]) + +db_state_default = {"closed": None, "conn": None, "ctx": None, "transactions": None} +db_state = ContextVar("db_state", default=db_state_default.copy()) + + +class PeeweeConnectionState(object): + def __init__(self, **kwargs): + super().__setattr__("_state", db_state) + super().__init__(**kwargs) + + def __setattr__(self, name, value): + self._state.get()[name] = value + + def __getattr__(self, name): + value = self._state.get()[name] + return value + + +class CustomReconnectMixin(ReconnectMixin): + reconnect_errors = ( + # psycopg2 + (OperationalError, "termin"), + (InterfaceError, "closed"), + # peewee + (PeeWeeInterfaceError, "closed"), + ) + + +class ReconnectingPostgresqlDatabase(CustomReconnectMixin, PostgresqlDatabase): + pass + + +def register_connection(db_url): + db = connect(db_url) + if isinstance(db, PostgresqlDatabase): + # Enable autoconnect for SQLite databases, managed by Peewee + db.autoconnect = True + db.reuse_if_open = True + log.info("Connected to PostgreSQL database") + + # Get the connection details + connection = parse(db_url) + + # Use our custom database class that supports reconnection + db = ReconnectingPostgresqlDatabase( + connection["database"], + user=connection["user"], + 
password=connection["password"], + host=connection["host"], + port=connection["port"], + ) + db.connect(reuse_if_open=True) + elif isinstance(db, SqliteDatabase): + # Enable autoconnect for SQLite databases, managed by Peewee + db.autoconnect = True + db.reuse_if_open = True + log.info("Connected to SQLite database") + else: + raise ValueError("Unsupported database connection") + return db diff --git a/backend/apps/webui/main.py b/backend/apps/webui/main.py new file mode 100644 index 0000000000000000000000000000000000000000..848c798c574d2372eeab1fe75917d6f1164b3907 --- /dev/null +++ b/backend/apps/webui/main.py @@ -0,0 +1,484 @@ +from fastapi import FastAPI, Depends +from fastapi.routing import APIRoute +from fastapi.responses import StreamingResponse +from fastapi.middleware.cors import CORSMiddleware +from starlette.middleware.sessions import SessionMiddleware +from sqlalchemy.orm import Session +from apps.webui.routers import ( + auths, + users, + chats, + documents, + tools, + models, + prompts, + configs, + memories, + utils, + files, + functions, +) +from apps.webui.models.functions import Functions +from apps.webui.models.models import Models +from apps.webui.utils import load_function_module_by_id + +from utils.misc import stream_message_template +from utils.task import prompt_template + + +from config import ( + WEBUI_BUILD_HASH, + SHOW_ADMIN_DETAILS, + ADMIN_EMAIL, + WEBUI_AUTH, + DEFAULT_MODELS, + DEFAULT_PROMPT_SUGGESTIONS, + DEFAULT_USER_ROLE, + ENABLE_SIGNUP, + ENABLE_LOGIN_FORM, + USER_PERMISSIONS, + WEBHOOK_URL, + WEBUI_AUTH_TRUSTED_EMAIL_HEADER, + WEBUI_AUTH_TRUSTED_NAME_HEADER, + JWT_EXPIRES_IN, + WEBUI_BANNERS, + ENABLE_COMMUNITY_SHARING, + OAUTH_PROVIDERS, + AppConfig, + OAUTH_USERNAME_CLAIM, + OAUTH_PICTURE_CLAIM, +) + +from apps.socket.main import get_event_call, get_event_emitter + +import inspect +import uuid +import time +import json + +from typing import Iterator, Generator, AsyncGenerator, Optional +from pydantic import BaseModel + +app = FastAPI() + +origins = ["*"] + +app.state.config = AppConfig() + +app.state.config.ENABLE_SIGNUP = ENABLE_SIGNUP +app.state.config.ENABLE_LOGIN_FORM = ENABLE_LOGIN_FORM +app.state.config.JWT_EXPIRES_IN = JWT_EXPIRES_IN +app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER +app.state.AUTH_TRUSTED_NAME_HEADER = WEBUI_AUTH_TRUSTED_NAME_HEADER + + +app.state.config.SHOW_ADMIN_DETAILS = SHOW_ADMIN_DETAILS +app.state.config.ADMIN_EMAIL = ADMIN_EMAIL + + +app.state.config.DEFAULT_MODELS = DEFAULT_MODELS +app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS +app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE +app.state.config.USER_PERMISSIONS = USER_PERMISSIONS +app.state.config.WEBHOOK_URL = WEBHOOK_URL +app.state.config.BANNERS = WEBUI_BANNERS + +app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING + +app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM +app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM + +app.state.MODELS = {} +app.state.TOOLS = {} +app.state.FUNCTIONS = {} + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +app.include_router(configs.router, prefix="/configs", tags=["configs"]) +app.include_router(auths.router, prefix="/auths", tags=["auths"]) +app.include_router(users.router, prefix="/users", tags=["users"]) +app.include_router(chats.router, prefix="/chats", tags=["chats"]) + +app.include_router(documents.router, prefix="/documents", 
tags=["documents"]) +app.include_router(models.router, prefix="/models", tags=["models"]) +app.include_router(prompts.router, prefix="/prompts", tags=["prompts"]) + +app.include_router(memories.router, prefix="/memories", tags=["memories"]) +app.include_router(files.router, prefix="/files", tags=["files"]) +app.include_router(tools.router, prefix="/tools", tags=["tools"]) +app.include_router(functions.router, prefix="/functions", tags=["functions"]) + +app.include_router(utils.router, prefix="/utils", tags=["utils"]) + + +@app.get("/") +async def get_status(): + return { + "status": True, + "auth": WEBUI_AUTH, + "default_models": app.state.config.DEFAULT_MODELS, + "default_prompt_suggestions": app.state.config.DEFAULT_PROMPT_SUGGESTIONS, + } + + +async def get_pipe_models(): + pipes = Functions.get_functions_by_type("pipe", active_only=True) + pipe_models = [] + + for pipe in pipes: + # Check if function is already loaded + if pipe.id not in app.state.FUNCTIONS: + function_module, function_type, frontmatter = load_function_module_by_id( + pipe.id + ) + app.state.FUNCTIONS[pipe.id] = function_module + else: + function_module = app.state.FUNCTIONS[pipe.id] + + if hasattr(function_module, "valves") and hasattr(function_module, "Valves"): + valves = Functions.get_function_valves_by_id(pipe.id) + function_module.valves = function_module.Valves( + **(valves if valves else {}) + ) + + # Check if function is a manifold + if hasattr(function_module, "type"): + if function_module.type == "manifold": + manifold_pipes = [] + + # Check if pipes is a function or a list + if callable(function_module.pipes): + manifold_pipes = function_module.pipes() + else: + manifold_pipes = function_module.pipes + + for p in manifold_pipes: + manifold_pipe_id = f'{pipe.id}.{p["id"]}' + manifold_pipe_name = p["name"] + + if hasattr(function_module, "name"): + manifold_pipe_name = ( + f"{function_module.name}{manifold_pipe_name}" + ) + + pipe_flag = {"type": pipe.type} + if hasattr(function_module, "ChatValves"): + pipe_flag["valves_spec"] = function_module.ChatValves.schema() + + pipe_models.append( + { + "id": manifold_pipe_id, + "name": manifold_pipe_name, + "object": "model", + "created": pipe.created_at, + "owned_by": "openai", + "pipe": pipe_flag, + } + ) + else: + pipe_flag = {"type": "pipe"} + if hasattr(function_module, "ChatValves"): + pipe_flag["valves_spec"] = function_module.ChatValves.schema() + + pipe_models.append( + { + "id": pipe.id, + "name": pipe.name, + "object": "model", + "created": pipe.created_at, + "owned_by": "openai", + "pipe": pipe_flag, + } + ) + + return pipe_models + + +async def generate_function_chat_completion(form_data, user): + model_id = form_data.get("model") + model_info = Models.get_model_by_id(model_id) + + metadata = None + if "metadata" in form_data: + metadata = form_data["metadata"] + del form_data["metadata"] + + __event_emitter__ = None + __event_call__ = None + __task__ = None + + if metadata: + if ( + metadata.get("session_id") + and metadata.get("chat_id") + and metadata.get("message_id") + ): + __event_emitter__ = await get_event_emitter(metadata) + __event_call__ = await get_event_call(metadata) + + if metadata.get("task"): + __task__ = metadata.get("task") + + if model_info: + if model_info.base_model_id: + form_data["model"] = model_info.base_model_id + + model_info.params = model_info.params.model_dump() + + if model_info.params: + if model_info.params.get("temperature", None) is not None: + form_data["temperature"] = float(model_info.params.get("temperature")) + + 
if model_info.params.get("top_p", None): + form_data["top_p"] = int(model_info.params.get("top_p", None)) + + if model_info.params.get("max_tokens", None): + form_data["max_tokens"] = int(model_info.params.get("max_tokens", None)) + + if model_info.params.get("frequency_penalty", None): + form_data["frequency_penalty"] = int( + model_info.params.get("frequency_penalty", None) + ) + + if model_info.params.get("seed", None): + form_data["seed"] = model_info.params.get("seed", None) + + if model_info.params.get("stop", None): + form_data["stop"] = ( + [ + bytes(stop, "utf-8").decode("unicode_escape") + for stop in model_info.params["stop"] + ] + if model_info.params.get("stop", None) + else None + ) + + system = model_info.params.get("system", None) + if system: + system = prompt_template( + system, + **( + { + "user_name": user.name, + "user_location": ( + user.info.get("location") if user.info else None + ), + } + if user + else {} + ), + ) + # Check if the payload already has a system message + # If not, add a system message to the payload + if form_data.get("messages"): + for message in form_data["messages"]: + if message.get("role") == "system": + message["content"] = system + message["content"] + break + else: + form_data["messages"].insert( + 0, + { + "role": "system", + "content": system, + }, + ) + + else: + pass + + async def job(): + pipe_id = form_data["model"] + if "." in pipe_id: + pipe_id, sub_pipe_id = pipe_id.split(".", 1) + print(pipe_id) + + # Check if function is already loaded + if pipe_id not in app.state.FUNCTIONS: + function_module, function_type, frontmatter = load_function_module_by_id( + pipe_id + ) + app.state.FUNCTIONS[pipe_id] = function_module + else: + function_module = app.state.FUNCTIONS[pipe_id] + + if hasattr(function_module, "valves") and hasattr(function_module, "Valves"): + + valves = Functions.get_function_valves_by_id(pipe_id) + function_module.valves = function_module.Valves( + **(valves if valves else {}) + ) + + pipe = function_module.pipe + + # Get the signature of the function + sig = inspect.signature(pipe) + params = {"body": form_data} + + if "__user__" in sig.parameters: + __user__ = { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + } + + try: + if hasattr(function_module, "UserValves"): + __user__["valves"] = function_module.UserValves( + **Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id) + ) + except Exception as e: + print(e) + + params = {**params, "__user__": __user__} + + if "__event_emitter__" in sig.parameters: + params = {**params, "__event_emitter__": __event_emitter__} + + if "__event_call__" in sig.parameters: + params = {**params, "__event_call__": __event_call__} + + if "__task__" in sig.parameters: + params = {**params, "__task__": __task__} + + if form_data["stream"]: + + async def stream_content(): + try: + if inspect.iscoroutinefunction(pipe): + res = await pipe(**params) + else: + res = pipe(**params) + + # Directly return if the response is a StreamingResponse + if isinstance(res, StreamingResponse): + async for data in res.body_iterator: + yield data + return + if isinstance(res, dict): + yield f"data: {json.dumps(res)}\n\n" + return + + except Exception as e: + print(f"Error: {e}") + yield f"data: {json.dumps({'error': {'detail':str(e)}})}\n\n" + return + + if isinstance(res, str): + message = stream_message_template(form_data["model"], res) + yield f"data: {json.dumps(message)}\n\n" + + if isinstance(res, Iterator): + for line in res: + if isinstance(line, BaseModel): + 
line = line.model_dump_json() + line = f"data: {line}" + if isinstance(line, dict): + line = f"data: {json.dumps(line)}" + + try: + line = line.decode("utf-8") + except: + pass + + if line.startswith("data:"): + yield f"{line}\n\n" + else: + line = stream_message_template(form_data["model"], line) + yield f"data: {json.dumps(line)}\n\n" + + if isinstance(res, str) or isinstance(res, Generator): + finish_message = { + "id": f"{form_data['model']}-{str(uuid.uuid4())}", + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": form_data["model"], + "choices": [ + { + "index": 0, + "delta": {}, + "logprobs": None, + "finish_reason": "stop", + } + ], + } + + yield f"data: {json.dumps(finish_message)}\n\n" + yield f"data: [DONE]" + + if isinstance(res, AsyncGenerator): + async for line in res: + if isinstance(line, BaseModel): + line = line.model_dump_json() + line = f"data: {line}" + if isinstance(line, dict): + line = f"data: {json.dumps(line)}" + + try: + line = line.decode("utf-8") + except: + pass + + if line.startswith("data:"): + yield f"{line}\n\n" + else: + line = stream_message_template(form_data["model"], line) + yield f"data: {json.dumps(line)}\n\n" + + return StreamingResponse(stream_content(), media_type="text/event-stream") + else: + + try: + if inspect.iscoroutinefunction(pipe): + res = await pipe(**params) + else: + res = pipe(**params) + + if isinstance(res, StreamingResponse): + return res + except Exception as e: + print(f"Error: {e}") + return {"error": {"detail": str(e)}} + + if isinstance(res, dict): + return res + elif isinstance(res, BaseModel): + return res.model_dump() + else: + message = "" + if isinstance(res, str): + message = res + elif isinstance(res, Generator): + for stream in res: + message = f"{message}{stream}" + elif isinstance(res, AsyncGenerator): + async for stream in res: + message = f"{message}{stream}" + + return { + "id": f"{form_data['model']}-{str(uuid.uuid4())}", + "object": "chat.completion", + "created": int(time.time()), + "model": form_data["model"], + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": message, + }, + "logprobs": None, + "finish_reason": "stop", + } + ], + } + + return await job() diff --git a/backend/apps/webui/models/auths.py b/backend/apps/webui/models/auths.py new file mode 100644 index 0000000000000000000000000000000000000000..bcea4a367ae2d328fd69e770363fcc400083c866 --- /dev/null +++ b/backend/apps/webui/models/auths.py @@ -0,0 +1,207 @@ +from pydantic import BaseModel +from typing import Optional +import uuid +import logging +from sqlalchemy import String, Column, Boolean, Text + +from apps.webui.models.users import UserModel, Users +from utils.utils import verify_password + +from apps.webui.internal.db import Base, get_db + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# DB MODEL +#################### + + +class Auth(Base): + __tablename__ = "auth" + + id = Column(String, primary_key=True) + email = Column(String) + password = Column(Text) + active = Column(Boolean) + + +class AuthModel(BaseModel): + id: str + email: str + password: str + active: bool = True + + +#################### +# Forms +#################### + + +class Token(BaseModel): + token: str + token_type: str + + +class ApiKey(BaseModel): + api_key: Optional[str] = None + + +class UserResponse(BaseModel): + id: str + email: str + name: str + role: str + profile_image_url: str + + +class SigninResponse(Token, 
UserResponse): + pass + + +class SigninForm(BaseModel): + email: str + password: str + + +class ProfileImageUrlForm(BaseModel): + profile_image_url: str + + +class UpdateProfileForm(BaseModel): + profile_image_url: str + name: str + + +class UpdatePasswordForm(BaseModel): + password: str + new_password: str + + +class SignupForm(BaseModel): + name: str + email: str + password: str + profile_image_url: Optional[str] = "/user.png" + + +class AddUserForm(SignupForm): + role: Optional[str] = "pending" + + +class AuthsTable: + + def insert_new_auth( + self, + email: str, + password: str, + name: str, + profile_image_url: str = "/user.png", + role: str = "pending", + oauth_sub: Optional[str] = None, + ) -> Optional[UserModel]: + with get_db() as db: + + log.info("insert_new_auth") + + id = str(uuid.uuid4()) + + auth = AuthModel( + **{"id": id, "email": email, "password": password, "active": True} + ) + result = Auth(**auth.model_dump()) + db.add(result) + + user = Users.insert_new_user( + id, name, email, profile_image_url, role, oauth_sub + ) + + db.commit() + db.refresh(result) + + if result and user: + return user + else: + return None + + def authenticate_user(self, email: str, password: str) -> Optional[UserModel]: + log.info(f"authenticate_user: {email}") + try: + with get_db() as db: + + auth = db.query(Auth).filter_by(email=email, active=True).first() + if auth: + if verify_password(password, auth.password): + user = Users.get_user_by_id(auth.id) + return user + else: + return None + else: + return None + except: + return None + + def authenticate_user_by_api_key(self, api_key: str) -> Optional[UserModel]: + log.info(f"authenticate_user_by_api_key: {api_key}") + # if no api_key, return None + if not api_key: + return None + + try: + user = Users.get_user_by_api_key(api_key) + return user if user else None + except: + return False + + def authenticate_user_by_trusted_header(self, email: str) -> Optional[UserModel]: + log.info(f"authenticate_user_by_trusted_header: {email}") + try: + with get_db() as db: + auth = db.query(Auth).filter_by(email=email, active=True).first() + if auth: + user = Users.get_user_by_id(auth.id) + return user + except: + return None + + def update_user_password_by_id(self, id: str, new_password: str) -> bool: + try: + with get_db() as db: + result = ( + db.query(Auth).filter_by(id=id).update({"password": new_password}) + ) + db.commit() + return True if result == 1 else False + except: + return False + + def update_email_by_id(self, id: str, email: str) -> bool: + try: + with get_db() as db: + result = db.query(Auth).filter_by(id=id).update({"email": email}) + db.commit() + return True if result == 1 else False + except: + return False + + def delete_auth_by_id(self, id: str) -> bool: + try: + with get_db() as db: + + # Delete User + result = Users.delete_user_by_id(id) + + if result: + db.query(Auth).filter_by(id=id).delete() + db.commit() + + return True + else: + return False + except: + return False + + +Auths = AuthsTable() diff --git a/backend/apps/webui/models/chats.py b/backend/apps/webui/models/chats.py new file mode 100644 index 0000000000000000000000000000000000000000..abde4f2b31996bd59d38de964e8a5f81b6aabee0 --- /dev/null +++ b/backend/apps/webui/models/chats.py @@ -0,0 +1,406 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Union, Optional + +import json +import uuid +import time + +from sqlalchemy import Column, String, BigInteger, Boolean, Text + +from apps.webui.internal.db import Base, get_db + + +#################### +# 
Chat DB Schema +#################### + + +class Chat(Base): + __tablename__ = "chat" + + id = Column(String, primary_key=True) + user_id = Column(String) + title = Column(Text) + chat = Column(Text) # Save Chat JSON as Text + + created_at = Column(BigInteger) + updated_at = Column(BigInteger) + + share_id = Column(Text, unique=True, nullable=True) + archived = Column(Boolean, default=False) + + +class ChatModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + + id: str + user_id: str + title: str + chat: str + + created_at: int # timestamp in epoch + updated_at: int # timestamp in epoch + + share_id: Optional[str] = None + archived: bool = False + + +#################### +# Forms +#################### + + +class ChatForm(BaseModel): + chat: dict + + +class ChatTitleForm(BaseModel): + title: str + + +class ChatResponse(BaseModel): + id: str + user_id: str + title: str + chat: dict + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + share_id: Optional[str] = None # id of the chat to be shared + archived: bool + + +class ChatTitleIdResponse(BaseModel): + id: str + title: str + updated_at: int + created_at: int + + +class ChatTable: + + def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]: + with get_db() as db: + + id = str(uuid.uuid4()) + chat = ChatModel( + **{ + "id": id, + "user_id": user_id, + "title": ( + form_data.chat["title"] + if "title" in form_data.chat + else "New Chat" + ), + "chat": json.dumps(form_data.chat), + "created_at": int(time.time()), + "updated_at": int(time.time()), + } + ) + + result = Chat(**chat.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + return ChatModel.model_validate(result) if result else None + + def update_chat_by_id(self, id: str, chat: dict) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat_obj = db.get(Chat, id) + chat_obj.chat = json.dumps(chat) + chat_obj.title = chat["title"] if "title" in chat else "New Chat" + chat_obj.updated_at = int(time.time()) + db.commit() + db.refresh(chat_obj) + + return ChatModel.model_validate(chat_obj) + except Exception as e: + return None + + def insert_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]: + with get_db() as db: + + # Get the existing chat to share + chat = db.get(Chat, chat_id) + # Check if the chat is already shared + if chat.share_id: + return self.get_chat_by_id_and_user_id(chat.share_id, "shared") + # Create a new chat with the same data, but with a new ID + shared_chat = ChatModel( + **{ + "id": str(uuid.uuid4()), + "user_id": f"shared-{chat_id}", + "title": chat.title, + "chat": chat.chat, + "created_at": chat.created_at, + "updated_at": int(time.time()), + } + ) + shared_result = Chat(**shared_chat.model_dump()) + db.add(shared_result) + db.commit() + db.refresh(shared_result) + + # Update the original chat with the share_id + result = ( + db.query(Chat) + .filter_by(id=chat_id) + .update({"share_id": shared_chat.id}) + ) + db.commit() + return shared_chat if (shared_result and result) else None + + def update_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]: + try: + with get_db() as db: + + print("update_shared_chat_by_id") + chat = db.get(Chat, chat_id) + print(chat) + chat.title = chat.title + chat.chat = chat.chat + db.commit() + db.refresh(chat) + + return self.get_chat_by_id(chat.share_id) + except: + return None + + def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool: + try: + with get_db() as db: + + 
db.query(Chat).filter_by(user_id=f"shared-{chat_id}").delete() + db.commit() + + return True + except: + return False + + def update_chat_share_id_by_id( + self, id: str, share_id: Optional[str] + ) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat = db.get(Chat, id) + chat.share_id = share_id + db.commit() + db.refresh(chat) + return ChatModel.model_validate(chat) + except: + return None + + def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat = db.get(Chat, id) + chat.archived = not chat.archived + db.commit() + db.refresh(chat) + return ChatModel.model_validate(chat) + except: + return None + + def archive_all_chats_by_user_id(self, user_id: str) -> bool: + try: + with get_db() as db: + db.query(Chat).filter_by(user_id=user_id).update({"archived": True}) + db.commit() + return True + except: + return False + + def get_archived_chat_list_by_user_id( + self, user_id: str, skip: int = 0, limit: int = 50 + ) -> List[ChatModel]: + with get_db() as db: + + all_chats = ( + db.query(Chat) + .filter_by(user_id=user_id, archived=True) + .order_by(Chat.updated_at.desc()) + # .limit(limit).offset(skip) + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def get_chat_list_by_user_id( + self, + user_id: str, + include_archived: bool = False, + skip: int = 0, + limit: int = 50, + ) -> List[ChatModel]: + with get_db() as db: + query = db.query(Chat).filter_by(user_id=user_id) + if not include_archived: + query = query.filter_by(archived=False) + all_chats = ( + query.order_by(Chat.updated_at.desc()) + # .limit(limit).offset(skip) + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def get_chat_title_id_list_by_user_id( + self, + user_id: str, + include_archived: bool = False, + skip: int = 0, + limit: int = 50, + ) -> List[ChatTitleIdResponse]: + with get_db() as db: + query = db.query(Chat).filter_by(user_id=user_id) + if not include_archived: + query = query.filter_by(archived=False) + + all_chats = ( + query.order_by(Chat.updated_at.desc()) + # limit cols + .with_entities( + Chat.id, Chat.title, Chat.updated_at, Chat.created_at + ).all() + ) + # result has to be destrctured from sqlalchemy `row` and mapped to a dict since the `ChatModel`is not the returned dataclass. 
+ return [ + ChatTitleIdResponse.model_validate( + { + "id": chat[0], + "title": chat[1], + "updated_at": chat[2], + "created_at": chat[3], + } + ) + for chat in all_chats + ] + + def get_chat_list_by_chat_ids( + self, chat_ids: List[str], skip: int = 0, limit: int = 50 + ) -> List[ChatModel]: + with get_db() as db: + all_chats = ( + db.query(Chat) + .filter(Chat.id.in_(chat_ids)) + .filter_by(archived=False) + .order_by(Chat.updated_at.desc()) + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def get_chat_by_id(self, id: str) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat = db.get(Chat, id) + return ChatModel.model_validate(chat) + except: + return None + + def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat = db.query(Chat).filter_by(share_id=id).first() + + if chat: + return self.get_chat_by_id(id) + else: + return None + except Exception as e: + return None + + def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]: + try: + with get_db() as db: + + chat = db.query(Chat).filter_by(id=id, user_id=user_id).first() + return ChatModel.model_validate(chat) + except: + return None + + def get_chats(self, skip: int = 0, limit: int = 50) -> List[ChatModel]: + with get_db() as db: + + all_chats = ( + db.query(Chat) + # .limit(limit).offset(skip) + .order_by(Chat.updated_at.desc()) + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def get_chats_by_user_id(self, user_id: str) -> List[ChatModel]: + with get_db() as db: + + all_chats = ( + db.query(Chat) + .filter_by(user_id=user_id) + .order_by(Chat.updated_at.desc()) + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def get_archived_chats_by_user_id(self, user_id: str) -> List[ChatModel]: + with get_db() as db: + + all_chats = ( + db.query(Chat) + .filter_by(user_id=user_id, archived=True) + .order_by(Chat.updated_at.desc()) + ) + return [ChatModel.model_validate(chat) for chat in all_chats] + + def delete_chat_by_id(self, id: str) -> bool: + try: + with get_db() as db: + + db.query(Chat).filter_by(id=id).delete() + db.commit() + + return True and self.delete_shared_chat_by_chat_id(id) + except: + return False + + def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool: + try: + with get_db() as db: + + db.query(Chat).filter_by(id=id, user_id=user_id).delete() + db.commit() + + return True and self.delete_shared_chat_by_chat_id(id) + except: + return False + + def delete_chats_by_user_id(self, user_id: str) -> bool: + try: + + with get_db() as db: + + self.delete_shared_chats_by_user_id(user_id) + + db.query(Chat).filter_by(user_id=user_id).delete() + db.commit() + + return True + except: + return False + + def delete_shared_chats_by_user_id(self, user_id: str) -> bool: + try: + + with get_db() as db: + + chats_by_user = db.query(Chat).filter_by(user_id=user_id).all() + shared_chat_ids = [f"shared-{chat.id}" for chat in chats_by_user] + + db.query(Chat).filter(Chat.user_id.in_(shared_chat_ids)).delete() + db.commit() + + return True + except: + return False + + +Chats = ChatTable() diff --git a/backend/apps/webui/models/documents.py b/backend/apps/webui/models/documents.py new file mode 100644 index 0000000000000000000000000000000000000000..ac8655da9ce4ee2fd50299aaaf1a9a6bdba86b58 --- /dev/null +++ b/backend/apps/webui/models/documents.py @@ -0,0 +1,167 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Optional +import time +import logging + 
+from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, get_db + +import json + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# Documents DB Schema +#################### + + +class Document(Base): + __tablename__ = "document" + + collection_name = Column(String, primary_key=True) + name = Column(String, unique=True) + title = Column(Text) + filename = Column(Text) + content = Column(Text, nullable=True) + user_id = Column(String) + timestamp = Column(BigInteger) + + +class DocumentModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + + collection_name: str + name: str + title: str + filename: str + content: Optional[str] = None + user_id: str + timestamp: int # timestamp in epoch + + +#################### +# Forms +#################### + + +class DocumentResponse(BaseModel): + collection_name: str + name: str + title: str + filename: str + content: Optional[dict] = None + user_id: str + timestamp: int # timestamp in epoch + + +class DocumentUpdateForm(BaseModel): + name: str + title: str + + +class DocumentForm(DocumentUpdateForm): + collection_name: str + filename: str + content: Optional[str] = None + + +class DocumentsTable: + + def insert_new_doc( + self, user_id: str, form_data: DocumentForm + ) -> Optional[DocumentModel]: + with get_db() as db: + + document = DocumentModel( + **{ + **form_data.model_dump(), + "user_id": user_id, + "timestamp": int(time.time()), + } + ) + + try: + result = Document(**document.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return DocumentModel.model_validate(result) + else: + return None + except: + return None + + def get_doc_by_name(self, name: str) -> Optional[DocumentModel]: + try: + with get_db() as db: + + document = db.query(Document).filter_by(name=name).first() + return DocumentModel.model_validate(document) if document else None + except: + return None + + def get_docs(self) -> List[DocumentModel]: + with get_db() as db: + + return [ + DocumentModel.model_validate(doc) for doc in db.query(Document).all() + ] + + def update_doc_by_name( + self, name: str, form_data: DocumentUpdateForm + ) -> Optional[DocumentModel]: + try: + with get_db() as db: + + db.query(Document).filter_by(name=name).update( + { + "title": form_data.title, + "name": form_data.name, + "timestamp": int(time.time()), + } + ) + db.commit() + return self.get_doc_by_name(form_data.name) + except Exception as e: + log.exception(e) + return None + + def update_doc_content_by_name( + self, name: str, updated: dict + ) -> Optional[DocumentModel]: + try: + doc = self.get_doc_by_name(name) + doc_content = json.loads(doc.content if doc.content else "{}") + doc_content = {**doc_content, **updated} + + with get_db() as db: + + db.query(Document).filter_by(name=name).update( + { + "content": json.dumps(doc_content), + "timestamp": int(time.time()), + } + ) + db.commit() + return self.get_doc_by_name(name) + except Exception as e: + log.exception(e) + return None + + def delete_doc_by_name(self, name: str) -> bool: + try: + with get_db() as db: + + db.query(Document).filter_by(name=name).delete() + db.commit() + return True + except: + return False + + +Documents = DocumentsTable() diff --git a/backend/apps/webui/models/files.py b/backend/apps/webui/models/files.py new file mode 100644 index 0000000000000000000000000000000000000000..16272f24ad11f9fda4047f2c2721123cdea3a2aa --- /dev/null +++ 
b/backend/apps/webui/models/files.py @@ -0,0 +1,126 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Union, Optional +import time +import logging + +from sqlalchemy import Column, String, BigInteger, Text + +from apps.webui.internal.db import JSONField, Base, get_db + +import json + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# Files DB Schema +#################### + + +class File(Base): + __tablename__ = "file" + + id = Column(String, primary_key=True) + user_id = Column(String) + filename = Column(Text) + meta = Column(JSONField) + created_at = Column(BigInteger) + + +class FileModel(BaseModel): + id: str + user_id: str + filename: str + meta: dict + created_at: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class FileModelResponse(BaseModel): + id: str + user_id: str + filename: str + meta: dict + created_at: int # timestamp in epoch + + +class FileForm(BaseModel): + id: str + filename: str + meta: dict = {} + + +class FilesTable: + + def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]: + with get_db() as db: + + file = FileModel( + **{ + **form_data.model_dump(), + "user_id": user_id, + "created_at": int(time.time()), + } + ) + + try: + result = File(**file.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return FileModel.model_validate(result) + else: + return None + except Exception as e: + print(f"Error creating tool: {e}") + return None + + def get_file_by_id(self, id: str) -> Optional[FileModel]: + with get_db() as db: + + try: + file = db.get(File, id) + return FileModel.model_validate(file) + except: + return None + + def get_files(self) -> List[FileModel]: + with get_db() as db: + + return [FileModel.model_validate(file) for file in db.query(File).all()] + + def delete_file_by_id(self, id: str) -> bool: + + with get_db() as db: + + try: + db.query(File).filter_by(id=id).delete() + db.commit() + + return True + except: + return False + + def delete_all_files(self) -> bool: + + with get_db() as db: + + try: + db.query(File).delete() + db.commit() + + return True + except: + return False + + +Files = FilesTable() diff --git a/backend/apps/webui/models/functions.py b/backend/apps/webui/models/functions.py new file mode 100644 index 0000000000000000000000000000000000000000..cb73da69449482a35264e688748422323880583b --- /dev/null +++ b/backend/apps/webui/models/functions.py @@ -0,0 +1,288 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Union, Optional +import time +import logging + +from sqlalchemy import Column, String, Text, BigInteger, Boolean + +from apps.webui.internal.db import JSONField, Base, get_db +from apps.webui.models.users import Users + +import json +import copy + + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# Functions DB Schema +#################### + + +class Function(Base): + __tablename__ = "function" + + id = Column(String, primary_key=True) + user_id = Column(String) + name = Column(Text) + type = Column(Text) + content = Column(Text) + meta = Column(JSONField) + valves = Column(JSONField) + is_active = Column(Boolean) + is_global = Column(Boolean) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) + + +class FunctionMeta(BaseModel): + description: Optional[str] 
= None + manifest: Optional[dict] = {} + + +class FunctionModel(BaseModel): + id: str + user_id: str + name: str + type: str + content: str + meta: FunctionMeta + is_active: bool = False + is_global: bool = False + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class FunctionResponse(BaseModel): + id: str + user_id: str + type: str + name: str + meta: FunctionMeta + is_active: bool + is_global: bool + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + +class FunctionForm(BaseModel): + id: str + name: str + content: str + meta: FunctionMeta + + +class FunctionValves(BaseModel): + valves: Optional[dict] = None + + +class FunctionsTable: + + def insert_new_function( + self, user_id: str, type: str, form_data: FunctionForm + ) -> Optional[FunctionModel]: + + function = FunctionModel( + **{ + **form_data.model_dump(), + "user_id": user_id, + "type": type, + "updated_at": int(time.time()), + "created_at": int(time.time()), + } + ) + + try: + with get_db() as db: + result = Function(**function.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return FunctionModel.model_validate(result) + else: + return None + except Exception as e: + print(f"Error creating tool: {e}") + return None + + def get_function_by_id(self, id: str) -> Optional[FunctionModel]: + try: + with get_db() as db: + + function = db.get(Function, id) + return FunctionModel.model_validate(function) + except: + return None + + def get_functions(self, active_only=False) -> List[FunctionModel]: + with get_db() as db: + + if active_only: + return [ + FunctionModel.model_validate(function) + for function in db.query(Function).filter_by(is_active=True).all() + ] + else: + return [ + FunctionModel.model_validate(function) + for function in db.query(Function).all() + ] + + def get_functions_by_type( + self, type: str, active_only=False + ) -> List[FunctionModel]: + with get_db() as db: + + if active_only: + return [ + FunctionModel.model_validate(function) + for function in db.query(Function) + .filter_by(type=type, is_active=True) + .all() + ] + else: + return [ + FunctionModel.model_validate(function) + for function in db.query(Function).filter_by(type=type).all() + ] + + def get_global_filter_functions(self) -> List[FunctionModel]: + with get_db() as db: + + return [ + FunctionModel.model_validate(function) + for function in db.query(Function) + .filter_by(type="filter", is_active=True, is_global=True) + .all() + ] + + def get_global_action_functions(self) -> List[FunctionModel]: + with get_db() as db: + return [ + FunctionModel.model_validate(function) + for function in db.query(Function) + .filter_by(type="action", is_active=True, is_global=True) + .all() + ] + + def get_function_valves_by_id(self, id: str) -> Optional[dict]: + with get_db() as db: + + try: + function = db.get(Function, id) + return function.valves if function.valves else {} + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_function_valves_by_id( + self, id: str, valves: dict + ) -> Optional[FunctionValves]: + with get_db() as db: + + try: + function = db.get(Function, id) + function.valves = valves + function.updated_at = int(time.time()) + db.commit() + db.refresh(function) + return self.get_function_by_id(id) + except: + return None + + def get_user_valves_by_id_and_user_id( + self, id: str, user_id: str + ) -> Optional[dict]: + 
+ try: + user = Users.get_user_by_id(user_id) + user_settings = user.settings.model_dump() if user.settings else {} + + # Check if user has "functions" and "valves" settings + if "functions" not in user_settings: + user_settings["functions"] = {} + if "valves" not in user_settings["functions"]: + user_settings["functions"]["valves"] = {} + + return user_settings["functions"]["valves"].get(id, {}) + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_user_valves_by_id_and_user_id( + self, id: str, user_id: str, valves: dict + ) -> Optional[dict]: + + try: + user = Users.get_user_by_id(user_id) + user_settings = user.settings.model_dump() if user.settings else {} + + # Check if user has "functions" and "valves" settings + if "functions" not in user_settings: + user_settings["functions"] = {} + if "valves" not in user_settings["functions"]: + user_settings["functions"]["valves"] = {} + + user_settings["functions"]["valves"][id] = valves + + # Update the user settings in the database + Users.update_user_by_id(user_id, {"settings": user_settings}) + + return user_settings["functions"]["valves"][id] + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_function_by_id(self, id: str, updated: dict) -> Optional[FunctionModel]: + with get_db() as db: + + try: + db.query(Function).filter_by(id=id).update( + { + **updated, + "updated_at": int(time.time()), + } + ) + db.commit() + return self.get_function_by_id(id) + except: + return None + + def deactivate_all_functions(self) -> Optional[bool]: + with get_db() as db: + + try: + db.query(Function).update( + { + "is_active": False, + "updated_at": int(time.time()), + } + ) + db.commit() + return True + except: + return None + + def delete_function_by_id(self, id: str) -> bool: + with get_db() as db: + try: + db.query(Function).filter_by(id=id).delete() + db.commit() + + return True + except: + return False + + +Functions = FunctionsTable() diff --git a/backend/apps/webui/models/memories.py b/backend/apps/webui/models/memories.py new file mode 100644 index 0000000000000000000000000000000000000000..02d4b6924986465612fd2d32be0499ffd3ff3f93 --- /dev/null +++ b/backend/apps/webui/models/memories.py @@ -0,0 +1,148 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Union, Optional + +from sqlalchemy import Column, String, BigInteger, Text + +from apps.webui.internal.db import Base, get_db + +import time +import uuid + +#################### +# Memory DB Schema +#################### + + +class Memory(Base): + __tablename__ = "memory" + + id = Column(String, primary_key=True) + user_id = Column(String) + content = Column(Text) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) + + +class MemoryModel(BaseModel): + id: str + user_id: str + content: str + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class MemoriesTable: + + def insert_new_memory( + self, + user_id: str, + content: str, + ) -> Optional[MemoryModel]: + + with get_db() as db: + id = str(uuid.uuid4()) + + memory = MemoryModel( + **{ + "id": id, + "user_id": user_id, + "content": content, + "created_at": int(time.time()), + "updated_at": int(time.time()), + } + ) + result = Memory(**memory.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return MemoryModel.model_validate(result) + else: + return None + + def 
update_memory_by_id( + self, + id: str, + content: str, + ) -> Optional[MemoryModel]: + with get_db() as db: + + try: + db.query(Memory).filter_by(id=id).update( + {"content": content, "updated_at": int(time.time())} + ) + db.commit() + return self.get_memory_by_id(id) + except: + return None + + def get_memories(self) -> List[MemoryModel]: + with get_db() as db: + + try: + memories = db.query(Memory).all() + return [MemoryModel.model_validate(memory) for memory in memories] + except: + return None + + def get_memories_by_user_id(self, user_id: str) -> List[MemoryModel]: + with get_db() as db: + + try: + memories = db.query(Memory).filter_by(user_id=user_id).all() + return [MemoryModel.model_validate(memory) for memory in memories] + except: + return None + + def get_memory_by_id(self, id: str) -> Optional[MemoryModel]: + with get_db() as db: + + try: + memory = db.get(Memory, id) + return MemoryModel.model_validate(memory) + except: + return None + + def delete_memory_by_id(self, id: str) -> bool: + with get_db() as db: + + try: + db.query(Memory).filter_by(id=id).delete() + db.commit() + + return True + + except: + return False + + def delete_memories_by_user_id(self, user_id: str) -> bool: + with get_db() as db: + + try: + db.query(Memory).filter_by(user_id=user_id).delete() + db.commit() + + return True + except: + return False + + def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool: + with get_db() as db: + + try: + db.query(Memory).filter_by(id=id, user_id=user_id).delete() + db.commit() + + return True + except: + return False + + +Memories = MemoriesTable() diff --git a/backend/apps/webui/models/models.py b/backend/apps/webui/models/models.py new file mode 100644 index 0000000000000000000000000000000000000000..3b128c7d6751c9213e84942f35127b727eb0cac1 --- /dev/null +++ b/backend/apps/webui/models/models.py @@ -0,0 +1,190 @@ +import json +import logging +from typing import Optional + +from pydantic import BaseModel, ConfigDict +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, JSONField, get_db + +from typing import List, Union, Optional +from config import SRC_LOG_LEVELS + +import time + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + + +#################### +# Models DB Schema +#################### + + +# ModelParams is a model for the data stored in the params field of the Model table +class ModelParams(BaseModel): + model_config = ConfigDict(extra="allow") + pass + + +# ModelMeta is a model for the data stored in the meta field of the Model table +class ModelMeta(BaseModel): + profile_image_url: Optional[str] = "/static/favicon.png" + + description: Optional[str] = None + """ + User-facing description of the model. + """ + + capabilities: Optional[dict] = None + + model_config = ConfigDict(extra="allow") + + pass + + +class Model(Base): + __tablename__ = "model" + + id = Column(Text, primary_key=True) + """ + The model's id as used in the API. If set to an existing model, it will override the model. + """ + user_id = Column(Text) + + base_model_id = Column(Text, nullable=True) + """ + An optional pointer to the actual model that should be used when proxying requests. + """ + + name = Column(Text) + """ + The human-readable display name of the model. + """ + + params = Column(JSONField) + """ + Holds a JSON encoded blob of parameters, see `ModelParams`. + """ + + meta = Column(JSONField) + """ + Holds a JSON encoded blob of metadata, see `ModelMeta`. 
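+    e.g. {"profile_image_url": "/static/favicon.png", "description": null, "capabilities": null}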
+ """ + + updated_at = Column(BigInteger) + created_at = Column(BigInteger) + + +class ModelModel(BaseModel): + id: str + user_id: str + base_model_id: Optional[str] = None + + name: str + params: ModelParams + meta: ModelMeta + + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class ModelResponse(BaseModel): + id: str + name: str + meta: ModelMeta + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + +class ModelForm(BaseModel): + id: str + base_model_id: Optional[str] = None + name: str + meta: ModelMeta + params: ModelParams + + +class ModelsTable: + + def insert_new_model( + self, form_data: ModelForm, user_id: str + ) -> Optional[ModelModel]: + model = ModelModel( + **{ + **form_data.model_dump(), + "user_id": user_id, + "created_at": int(time.time()), + "updated_at": int(time.time()), + } + ) + try: + + with get_db() as db: + + result = Model(**model.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + + if result: + return ModelModel.model_validate(result) + else: + return None + except Exception as e: + print(e) + return None + + def get_all_models(self) -> List[ModelModel]: + with get_db() as db: + + return [ModelModel.model_validate(model) for model in db.query(Model).all()] + + def get_model_by_id(self, id: str) -> Optional[ModelModel]: + try: + with get_db() as db: + + model = db.get(Model, id) + return ModelModel.model_validate(model) + except: + return None + + def update_model_by_id(self, id: str, model: ModelForm) -> Optional[ModelModel]: + try: + with get_db() as db: + # update only the fields that are present in the model + result = ( + db.query(Model) + .filter_by(id=id) + .update(model.model_dump(exclude={"id"}, exclude_none=True)) + ) + db.commit() + + model = db.get(Model, id) + db.refresh(model) + return ModelModel.model_validate(model) + except Exception as e: + print(e) + + return None + + def delete_model_by_id(self, id: str) -> bool: + try: + with get_db() as db: + + db.query(Model).filter_by(id=id).delete() + db.commit() + + return True + except: + return False + + +Models = ModelsTable() diff --git a/backend/apps/webui/models/prompts.py b/backend/apps/webui/models/prompts.py new file mode 100644 index 0000000000000000000000000000000000000000..b8467b63164f537a6e669c5c078a95eb2d162339 --- /dev/null +++ b/backend/apps/webui/models/prompts.py @@ -0,0 +1,119 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Optional +import time + +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, get_db + +import json + +#################### +# Prompts DB Schema +#################### + + +class Prompt(Base): + __tablename__ = "prompt" + + command = Column(String, primary_key=True) + user_id = Column(String) + title = Column(Text) + content = Column(Text) + timestamp = Column(BigInteger) + + +class PromptModel(BaseModel): + command: str + user_id: str + title: str + content: str + timestamp: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class PromptForm(BaseModel): + command: str + title: str + content: str + + +class PromptsTable: + + def insert_new_prompt( + self, user_id: str, form_data: PromptForm + ) -> Optional[PromptModel]: + prompt = PromptModel( + **{ + "user_id": user_id, + "command": form_data.command, + "title": 
form_data.title, + "content": form_data.content, + "timestamp": int(time.time()), + } + ) + + try: + with get_db() as db: + + result = Prompt(**prompt.dict()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return PromptModel.model_validate(result) + else: + return None + except Exception as e: + return None + + def get_prompt_by_command(self, command: str) -> Optional[PromptModel]: + try: + with get_db() as db: + + prompt = db.query(Prompt).filter_by(command=command).first() + return PromptModel.model_validate(prompt) + except: + return None + + def get_prompts(self) -> List[PromptModel]: + with get_db() as db: + + return [ + PromptModel.model_validate(prompt) for prompt in db.query(Prompt).all() + ] + + def update_prompt_by_command( + self, command: str, form_data: PromptForm + ) -> Optional[PromptModel]: + try: + with get_db() as db: + + prompt = db.query(Prompt).filter_by(command=command).first() + prompt.title = form_data.title + prompt.content = form_data.content + prompt.timestamp = int(time.time()) + db.commit() + return PromptModel.model_validate(prompt) + except: + return None + + def delete_prompt_by_command(self, command: str) -> bool: + try: + with get_db() as db: + + db.query(Prompt).filter_by(command=command).delete() + db.commit() + + return True + except: + return False + + +Prompts = PromptsTable() diff --git a/backend/apps/webui/models/tags.py b/backend/apps/webui/models/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..7285b6fe245fc39099040d0a4f568017eee09797 --- /dev/null +++ b/backend/apps/webui/models/tags.py @@ -0,0 +1,272 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Optional + +import json +import uuid +import time +import logging + +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, get_db + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# Tag DB Schema +#################### + + +class Tag(Base): + __tablename__ = "tag" + + id = Column(String, primary_key=True) + name = Column(String) + user_id = Column(String) + data = Column(Text, nullable=True) + + +class ChatIdTag(Base): + __tablename__ = "chatidtag" + + id = Column(String, primary_key=True) + tag_name = Column(String) + chat_id = Column(String) + user_id = Column(String) + timestamp = Column(BigInteger) + + +class TagModel(BaseModel): + id: str + name: str + user_id: str + data: Optional[str] = None + + model_config = ConfigDict(from_attributes=True) + + +class ChatIdTagModel(BaseModel): + id: str + tag_name: str + chat_id: str + user_id: str + timestamp: int + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class ChatIdTagForm(BaseModel): + tag_name: str + chat_id: str + + +class TagChatIdsResponse(BaseModel): + chat_ids: List[str] + + +class ChatTagsResponse(BaseModel): + tags: List[str] + + +class TagTable: + + def insert_new_tag(self, name: str, user_id: str) -> Optional[TagModel]: + with get_db() as db: + + id = str(uuid.uuid4()) + tag = TagModel(**{"id": id, "user_id": user_id, "name": name}) + try: + result = Tag(**tag.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return TagModel.model_validate(result) + else: + return None + except Exception as e: + return None + + def get_tag_by_name_and_user_id( + self, name: str, user_id: str + ) -> Optional[TagModel]: + try: + with get_db() as db: + tag = 
db.query(Tag).filter_by(name=name, user_id=user_id).first() + return TagModel.model_validate(tag) + except Exception as e: + return None + + def add_tag_to_chat( + self, user_id: str, form_data: ChatIdTagForm + ) -> Optional[ChatIdTagModel]: + tag = self.get_tag_by_name_and_user_id(form_data.tag_name, user_id) + if tag == None: + tag = self.insert_new_tag(form_data.tag_name, user_id) + + id = str(uuid.uuid4()) + chatIdTag = ChatIdTagModel( + **{ + "id": id, + "user_id": user_id, + "chat_id": form_data.chat_id, + "tag_name": tag.name, + "timestamp": int(time.time()), + } + ) + try: + with get_db() as db: + result = ChatIdTag(**chatIdTag.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return ChatIdTagModel.model_validate(result) + else: + return None + except: + return None + + def get_tags_by_user_id(self, user_id: str) -> List[TagModel]: + with get_db() as db: + tag_names = [ + chat_id_tag.tag_name + for chat_id_tag in ( + db.query(ChatIdTag) + .filter_by(user_id=user_id) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) + ] + + return [ + TagModel.model_validate(tag) + for tag in ( + db.query(Tag) + .filter_by(user_id=user_id) + .filter(Tag.name.in_(tag_names)) + .all() + ) + ] + + def get_tags_by_chat_id_and_user_id( + self, chat_id: str, user_id: str + ) -> List[TagModel]: + with get_db() as db: + + tag_names = [ + chat_id_tag.tag_name + for chat_id_tag in ( + db.query(ChatIdTag) + .filter_by(user_id=user_id, chat_id=chat_id) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) + ] + + return [ + TagModel.model_validate(tag) + for tag in ( + db.query(Tag) + .filter_by(user_id=user_id) + .filter(Tag.name.in_(tag_names)) + .all() + ) + ] + + def get_chat_ids_by_tag_name_and_user_id( + self, tag_name: str, user_id: str + ) -> List[ChatIdTagModel]: + with get_db() as db: + + return [ + ChatIdTagModel.model_validate(chat_id_tag) + for chat_id_tag in ( + db.query(ChatIdTag) + .filter_by(user_id=user_id, tag_name=tag_name) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) + ] + + def count_chat_ids_by_tag_name_and_user_id( + self, tag_name: str, user_id: str + ) -> int: + with get_db() as db: + + return ( + db.query(ChatIdTag) + .filter_by(tag_name=tag_name, user_id=user_id) + .count() + ) + + def delete_tag_by_tag_name_and_user_id(self, tag_name: str, user_id: str) -> bool: + try: + with get_db() as db: + res = ( + db.query(ChatIdTag) + .filter_by(tag_name=tag_name, user_id=user_id) + .delete() + ) + log.debug(f"res: {res}") + db.commit() + + tag_count = self.count_chat_ids_by_tag_name_and_user_id( + tag_name, user_id + ) + if tag_count == 0: + # Remove tag item from Tag col as well + db.query(Tag).filter_by(name=tag_name, user_id=user_id).delete() + db.commit() + return True + except Exception as e: + log.error(f"delete_tag: {e}") + return False + + def delete_tag_by_tag_name_and_chat_id_and_user_id( + self, tag_name: str, chat_id: str, user_id: str + ) -> bool: + try: + with get_db() as db: + + res = ( + db.query(ChatIdTag) + .filter_by(tag_name=tag_name, chat_id=chat_id, user_id=user_id) + .delete() + ) + log.debug(f"res: {res}") + db.commit() + + tag_count = self.count_chat_ids_by_tag_name_and_user_id( + tag_name, user_id + ) + if tag_count == 0: + # Remove tag item from Tag col as well + db.query(Tag).filter_by(name=tag_name, user_id=user_id).delete() + db.commit() + + return True + except Exception as e: + log.error(f"delete_tag: {e}") + return False + + def delete_tags_by_chat_id_and_user_id(self, chat_id: str, user_id: str) -> bool: + tags = 
self.get_tags_by_chat_id_and_user_id(chat_id, user_id) + + for tag in tags: + self.delete_tag_by_tag_name_and_chat_id_and_user_id( + tag.tag_name, chat_id, user_id + ) + + return True + + +Tags = TagTable() diff --git a/backend/apps/webui/models/tools.py b/backend/apps/webui/models/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..685ce6fcfbd3b721482a4bf62888402fc73a9137 --- /dev/null +++ b/backend/apps/webui/models/tools.py @@ -0,0 +1,213 @@ +from pydantic import BaseModel, ConfigDict +from typing import List, Optional +import time +import logging +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, JSONField, get_db +from apps.webui.models.users import Users + +import json +import copy + + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +#################### +# Tools DB Schema +#################### + + +class Tool(Base): + __tablename__ = "tool" + + id = Column(String, primary_key=True) + user_id = Column(String) + name = Column(Text) + content = Column(Text) + specs = Column(JSONField) + meta = Column(JSONField) + valves = Column(JSONField) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) + + +class ToolMeta(BaseModel): + description: Optional[str] = None + manifest: Optional[dict] = {} + + +class ToolModel(BaseModel): + id: str + user_id: str + name: str + content: str + specs: List[dict] + meta: ToolMeta + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class ToolResponse(BaseModel): + id: str + user_id: str + name: str + meta: ToolMeta + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + +class ToolForm(BaseModel): + id: str + name: str + content: str + meta: ToolMeta + + +class ToolValves(BaseModel): + valves: Optional[dict] = None + + +class ToolsTable: + + def insert_new_tool( + self, user_id: str, form_data: ToolForm, specs: List[dict] + ) -> Optional[ToolModel]: + + with get_db() as db: + + tool = ToolModel( + **{ + **form_data.model_dump(), + "specs": specs, + "user_id": user_id, + "updated_at": int(time.time()), + "created_at": int(time.time()), + } + ) + + try: + result = Tool(**tool.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return ToolModel.model_validate(result) + else: + return None + except Exception as e: + print(f"Error creating tool: {e}") + return None + + def get_tool_by_id(self, id: str) -> Optional[ToolModel]: + try: + with get_db() as db: + + tool = db.get(Tool, id) + return ToolModel.model_validate(tool) + except: + return None + + def get_tools(self) -> List[ToolModel]: + with get_db() as db: + return [ToolModel.model_validate(tool) for tool in db.query(Tool).all()] + + def get_tool_valves_by_id(self, id: str) -> Optional[dict]: + try: + with get_db() as db: + + tool = db.get(Tool, id) + return tool.valves if tool.valves else {} + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_tool_valves_by_id(self, id: str, valves: dict) -> Optional[ToolValves]: + try: + with get_db() as db: + + db.query(Tool).filter_by(id=id).update( + {"valves": valves, "updated_at": int(time.time())} + ) + db.commit() + return self.get_tool_by_id(id) + except: + return None + + def get_user_valves_by_id_and_user_id( + self, id: str, user_id: str + ) -> Optional[dict]: + try: + user = 
Users.get_user_by_id(user_id) + user_settings = user.settings.model_dump() if user.settings else {} + + # Check if user has "tools" and "valves" settings + if "tools" not in user_settings: + user_settings["tools"] = {} + if "valves" not in user_settings["tools"]: + user_settings["tools"]["valves"] = {} + + return user_settings["tools"]["valves"].get(id, {}) + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_user_valves_by_id_and_user_id( + self, id: str, user_id: str, valves: dict + ) -> Optional[dict]: + try: + user = Users.get_user_by_id(user_id) + user_settings = user.settings.model_dump() if user.settings else {} + + # Check if user has "tools" and "valves" settings + if "tools" not in user_settings: + user_settings["tools"] = {} + if "valves" not in user_settings["tools"]: + user_settings["tools"]["valves"] = {} + + user_settings["tools"]["valves"][id] = valves + + # Update the user settings in the database + Users.update_user_by_id(user_id, {"settings": user_settings}) + + return user_settings["tools"]["valves"][id] + except Exception as e: + print(f"An error occurred: {e}") + return None + + def update_tool_by_id(self, id: str, updated: dict) -> Optional[ToolModel]: + try: + with get_db() as db: + db.query(Tool).filter_by(id=id).update( + {**updated, "updated_at": int(time.time())} + ) + db.commit() + + tool = db.query(Tool).get(id) + db.refresh(tool) + return ToolModel.model_validate(tool) + except: + return None + + def delete_tool_by_id(self, id: str) -> bool: + try: + with get_db() as db: + db.query(Tool).filter_by(id=id).delete() + db.commit() + + return True + except: + return False + + +Tools = ToolsTable() diff --git a/backend/apps/webui/models/users.py b/backend/apps/webui/models/users.py new file mode 100644 index 0000000000000000000000000000000000000000..2f30cda0230292c86ab1a6fb89010ae9d419b610 --- /dev/null +++ b/backend/apps/webui/models/users.py @@ -0,0 +1,269 @@ +from pydantic import BaseModel, ConfigDict, parse_obj_as +from typing import List, Union, Optional +import time + +from sqlalchemy import String, Column, BigInteger, Text + +from utils.misc import get_gravatar_url + +from apps.webui.internal.db import Base, JSONField, Session, get_db +from apps.webui.models.chats import Chats + +#################### +# User DB Schema +#################### + + +class User(Base): + __tablename__ = "user" + + id = Column(String, primary_key=True) + name = Column(String) + email = Column(String) + role = Column(String) + profile_image_url = Column(Text) + + last_active_at = Column(BigInteger) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) + + api_key = Column(String, nullable=True, unique=True) + settings = Column(JSONField, nullable=True) + info = Column(JSONField, nullable=True) + + oauth_sub = Column(Text, unique=True) + + +class UserSettings(BaseModel): + ui: Optional[dict] = {} + model_config = ConfigDict(extra="allow") + pass + + +class UserModel(BaseModel): + id: str + name: str + email: str + role: str = "pending" + profile_image_url: str + + last_active_at: int # timestamp in epoch + updated_at: int # timestamp in epoch + created_at: int # timestamp in epoch + + api_key: Optional[str] = None + settings: Optional[UserSettings] = None + info: Optional[dict] = None + + oauth_sub: Optional[str] = None + + model_config = ConfigDict(from_attributes=True) + + +#################### +# Forms +#################### + + +class UserRoleUpdateForm(BaseModel): + id: str + role: str + + +class UserUpdateForm(BaseModel): + name: 
str + email: str + profile_image_url: str + password: Optional[str] = None + + +class UsersTable: + + def insert_new_user( + self, + id: str, + name: str, + email: str, + profile_image_url: str = "/user.png", + role: str = "pending", + oauth_sub: Optional[str] = None, + ) -> Optional[UserModel]: + with get_db() as db: + user = UserModel( + **{ + "id": id, + "name": name, + "email": email, + "role": role, + "profile_image_url": profile_image_url, + "last_active_at": int(time.time()), + "created_at": int(time.time()), + "updated_at": int(time.time()), + "oauth_sub": oauth_sub, + } + ) + result = User(**user.model_dump()) + db.add(result) + db.commit() + db.refresh(result) + if result: + return user + else: + return None + + def get_user_by_id(self, id: str) -> Optional[UserModel]: + try: + with get_db() as db: + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except Exception as e: + return None + + def get_user_by_api_key(self, api_key: str) -> Optional[UserModel]: + try: + with get_db() as db: + + user = db.query(User).filter_by(api_key=api_key).first() + return UserModel.model_validate(user) + except: + return None + + def get_user_by_email(self, email: str) -> Optional[UserModel]: + try: + with get_db() as db: + + user = db.query(User).filter_by(email=email).first() + return UserModel.model_validate(user) + except: + return None + + def get_user_by_oauth_sub(self, sub: str) -> Optional[UserModel]: + try: + with get_db() as db: + + user = db.query(User).filter_by(oauth_sub=sub).first() + return UserModel.model_validate(user) + except: + return None + + def get_users(self, skip: int = 0, limit: int = 50) -> List[UserModel]: + with get_db() as db: + users = ( + db.query(User) + # .offset(skip).limit(limit) + .all() + ) + return [UserModel.model_validate(user) for user in users] + + def get_num_users(self) -> Optional[int]: + with get_db() as db: + return db.query(User).count() + + def get_first_user(self) -> UserModel: + try: + with get_db() as db: + user = db.query(User).order_by(User.created_at).first() + return UserModel.model_validate(user) + except: + return None + + def update_user_role_by_id(self, id: str, role: str) -> Optional[UserModel]: + try: + with get_db() as db: + db.query(User).filter_by(id=id).update({"role": role}) + db.commit() + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except: + return None + + def update_user_profile_image_url_by_id( + self, id: str, profile_image_url: str + ) -> Optional[UserModel]: + try: + with get_db() as db: + db.query(User).filter_by(id=id).update( + {"profile_image_url": profile_image_url} + ) + db.commit() + + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except: + return None + + def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]: + try: + with get_db() as db: + + db.query(User).filter_by(id=id).update( + {"last_active_at": int(time.time())} + ) + db.commit() + + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except: + return None + + def update_user_oauth_sub_by_id( + self, id: str, oauth_sub: str + ) -> Optional[UserModel]: + try: + with get_db() as db: + db.query(User).filter_by(id=id).update({"oauth_sub": oauth_sub}) + db.commit() + + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except: + return None + + def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]: + try: + with get_db() as 
db: + db.query(User).filter_by(id=id).update(updated) + db.commit() + + user = db.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + # return UserModel(**user.dict()) + except Exception as e: + return None + + def delete_user_by_id(self, id: str) -> bool: + try: + # Delete User Chats + result = Chats.delete_chats_by_user_id(id) + + if result: + with get_db() as db: + # Delete User + db.query(User).filter_by(id=id).delete() + db.commit() + + return True + else: + return False + except: + return False + + def update_user_api_key_by_id(self, id: str, api_key: str) -> str: + try: + with get_db() as db: + result = db.query(User).filter_by(id=id).update({"api_key": api_key}) + db.commit() + return True if result == 1 else False + except: + return False + + def get_user_api_key_by_id(self, id: str) -> Optional[str]: + try: + with get_db() as db: + user = db.query(User).filter_by(id=id).first() + return user.api_key + except Exception as e: + return None + + +Users = UsersTable() diff --git a/backend/apps/webui/routers/auths.py b/backend/apps/webui/routers/auths.py new file mode 100644 index 0000000000000000000000000000000000000000..e2d6a5036ffb4cf4528203e3a2df37343c7c44e9 --- /dev/null +++ b/backend/apps/webui/routers/auths.py @@ -0,0 +1,429 @@ +import logging + +from fastapi import Request, UploadFile, File +from fastapi import Depends, HTTPException, status +from fastapi.responses import Response + +from fastapi import APIRouter +from pydantic import BaseModel +import re +import uuid +import csv + +from apps.webui.models.auths import ( + SigninForm, + SignupForm, + AddUserForm, + UpdateProfileForm, + UpdatePasswordForm, + UserResponse, + SigninResponse, + Auths, + ApiKey, +) +from apps.webui.models.users import Users + +from utils.utils import ( + get_password_hash, + get_current_user, + get_admin_user, + create_token, + create_api_key, +) +from utils.misc import parse_duration, validate_email_format +from utils.webhook import post_webhook +from constants import ERROR_MESSAGES, WEBHOOK_MESSAGES +from config import ( + WEBUI_AUTH, + WEBUI_AUTH_TRUSTED_EMAIL_HEADER, + WEBUI_AUTH_TRUSTED_NAME_HEADER, +) + +router = APIRouter() + +############################ +# GetSessionUser +############################ + + +@router.get("/", response_model=UserResponse) +async def get_session_user( + request: Request, response: Response, user=Depends(get_current_user) +): + token = create_token( + data={"id": user.id}, + expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN), + ) + + # Set the cookie token + response.set_cookie( + key="token", + value=token, + httponly=True, # Ensures the cookie is not accessible via JavaScript + ) + + return { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + "profile_image_url": user.profile_image_url, + } + + +############################ +# Update Profile +############################ + + +@router.post("/update/profile", response_model=UserResponse) +async def update_profile( + form_data: UpdateProfileForm, session_user=Depends(get_current_user) +): + if session_user: + user = Users.update_user_by_id( + session_user.id, + {"profile_image_url": form_data.profile_image_url, "name": form_data.name}, + ) + if user: + return user + else: + raise HTTPException(400, detail=ERROR_MESSAGES.DEFAULT()) + else: + raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED) + + +############################ +# Update Password +############################ + + +@router.post("/update/password", 
response_model=bool) +async def update_password( + form_data: UpdatePasswordForm, session_user=Depends(get_current_user) +): + if WEBUI_AUTH_TRUSTED_EMAIL_HEADER: + raise HTTPException(400, detail=ERROR_MESSAGES.ACTION_PROHIBITED) + if session_user: + user = Auths.authenticate_user(session_user.email, form_data.password) + + if user: + hashed = get_password_hash(form_data.new_password) + return Auths.update_user_password_by_id(user.id, hashed) + else: + raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_PASSWORD) + else: + raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED) + + +############################ +# SignIn +############################ + + +@router.post("/signin", response_model=SigninResponse) +async def signin(request: Request, response: Response, form_data: SigninForm): + if WEBUI_AUTH_TRUSTED_EMAIL_HEADER: + if WEBUI_AUTH_TRUSTED_EMAIL_HEADER not in request.headers: + raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_TRUSTED_HEADER) + + trusted_email = request.headers[WEBUI_AUTH_TRUSTED_EMAIL_HEADER].lower() + trusted_name = trusted_email + if WEBUI_AUTH_TRUSTED_NAME_HEADER: + trusted_name = request.headers.get( + WEBUI_AUTH_TRUSTED_NAME_HEADER, trusted_email + ) + if not Users.get_user_by_email(trusted_email.lower()): + await signup( + request, + response, + SignupForm( + email=trusted_email, password=str(uuid.uuid4()), name=trusted_name + ), + ) + user = Auths.authenticate_user_by_trusted_header(trusted_email) + elif WEBUI_AUTH == False: + admin_email = "admin@localhost" + admin_password = "admin" + + if Users.get_user_by_email(admin_email.lower()): + user = Auths.authenticate_user(admin_email.lower(), admin_password) + else: + if Users.get_num_users() != 0: + raise HTTPException(400, detail=ERROR_MESSAGES.EXISTING_USERS) + + await signup( + request, + response, + SignupForm(email=admin_email, password=admin_password, name="User"), + ) + + user = Auths.authenticate_user(admin_email.lower(), admin_password) + else: + user = Auths.authenticate_user(form_data.email.lower(), form_data.password) + + if user: + token = create_token( + data={"id": user.id}, + expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN), + ) + + # Set the cookie token + response.set_cookie( + key="token", + value=token, + httponly=True, # Ensures the cookie is not accessible via JavaScript + ) + + return { + "token": token, + "token_type": "Bearer", + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + "profile_image_url": user.profile_image_url, + } + else: + raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED) + + +############################ +# SignUp +############################ + + +@router.post("/signup", response_model=SigninResponse) +async def signup(request: Request, response: Response, form_data: SignupForm): + if not request.app.state.config.ENABLE_SIGNUP and WEBUI_AUTH: + raise HTTPException( + status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED + ) + + if not validate_email_format(form_data.email.lower()): + raise HTTPException( + status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT + ) + + if Users.get_user_by_email(form_data.email.lower()): + raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN) + + try: + role = ( + "admin" + if Users.get_num_users() == 0 + else request.app.state.config.DEFAULT_USER_ROLE + ) + hashed = get_password_hash(form_data.password) + user = Auths.insert_new_auth( + form_data.email.lower(), + hashed, + form_data.name, + form_data.profile_image_url, + 
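+            # first signup is assigned the "admin" role; later signups receive
+            # DEFAULT_USER_ROLE (see the role computation a few lines above)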
role, + ) + + if user: + token = create_token( + data={"id": user.id}, + expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN), + ) + # response.set_cookie(key='token', value=token, httponly=True) + + # Set the cookie token + response.set_cookie( + key="token", + value=token, + httponly=True, # Ensures the cookie is not accessible via JavaScript + ) + + if request.app.state.config.WEBHOOK_URL: + post_webhook( + request.app.state.config.WEBHOOK_URL, + WEBHOOK_MESSAGES.USER_SIGNUP(user.name), + { + "action": "signup", + "message": WEBHOOK_MESSAGES.USER_SIGNUP(user.name), + "user": user.model_dump_json(exclude_none=True), + }, + ) + + return { + "token": token, + "token_type": "Bearer", + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + "profile_image_url": user.profile_image_url, + } + else: + raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_USER_ERROR) + except Exception as err: + raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err)) + + +############################ +# AddUser +############################ + + +@router.post("/add", response_model=SigninResponse) +async def add_user(form_data: AddUserForm, user=Depends(get_admin_user)): + + if not validate_email_format(form_data.email.lower()): + raise HTTPException( + status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT + ) + + if Users.get_user_by_email(form_data.email.lower()): + raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN) + + try: + + print(form_data) + hashed = get_password_hash(form_data.password) + user = Auths.insert_new_auth( + form_data.email.lower(), + hashed, + form_data.name, + form_data.profile_image_url, + form_data.role, + ) + + if user: + token = create_token(data={"id": user.id}) + return { + "token": token, + "token_type": "Bearer", + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + "profile_image_url": user.profile_image_url, + } + else: + raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_USER_ERROR) + except Exception as err: + raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err)) + + +############################ +# GetAdminDetails +############################ + + +@router.get("/admin/details") +async def get_admin_details(request: Request, user=Depends(get_current_user)): + if request.app.state.config.SHOW_ADMIN_DETAILS: + admin_email = request.app.state.config.ADMIN_EMAIL + admin_name = None + + print(admin_email, admin_name) + + if admin_email: + admin = Users.get_user_by_email(admin_email) + if admin: + admin_name = admin.name + else: + admin = Users.get_first_user() + if admin: + admin_email = admin.email + admin_name = admin.name + + return { + "name": admin_name, + "email": admin_email, + } + else: + raise HTTPException(400, detail=ERROR_MESSAGES.ACTION_PROHIBITED) + + +############################ +# ToggleSignUp +############################ + + +@router.get("/admin/config") +async def get_admin_config(request: Request, user=Depends(get_admin_user)): + return { + "SHOW_ADMIN_DETAILS": request.app.state.config.SHOW_ADMIN_DETAILS, + "ENABLE_SIGNUP": request.app.state.config.ENABLE_SIGNUP, + "DEFAULT_USER_ROLE": request.app.state.config.DEFAULT_USER_ROLE, + "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN, + "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING, + } + + +class AdminConfig(BaseModel): + SHOW_ADMIN_DETAILS: bool + ENABLE_SIGNUP: bool + DEFAULT_USER_ROLE: str + JWT_EXPIRES_IN: str + ENABLE_COMMUNITY_SHARING: bool + + 
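+# JWT_EXPIRES_IN accepts "-1", "0", or a number with a unit suffix
+# (ms, s, m, h, d, w), e.g. "30m", "12h", "7d"; the handler below only
+# applies values matching that pattern and leaves anything else unchanged.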
+@router.post("/admin/config") +async def update_admin_config( + request: Request, form_data: AdminConfig, user=Depends(get_admin_user) +): + request.app.state.config.SHOW_ADMIN_DETAILS = form_data.SHOW_ADMIN_DETAILS + request.app.state.config.ENABLE_SIGNUP = form_data.ENABLE_SIGNUP + + if form_data.DEFAULT_USER_ROLE in ["pending", "user", "admin"]: + request.app.state.config.DEFAULT_USER_ROLE = form_data.DEFAULT_USER_ROLE + + pattern = r"^(-1|0|(-?\d+(\.\d+)?)(ms|s|m|h|d|w))$" + + # Check if the input string matches the pattern + if re.match(pattern, form_data.JWT_EXPIRES_IN): + request.app.state.config.JWT_EXPIRES_IN = form_data.JWT_EXPIRES_IN + + request.app.state.config.ENABLE_COMMUNITY_SHARING = ( + form_data.ENABLE_COMMUNITY_SHARING + ) + + return { + "SHOW_ADMIN_DETAILS": request.app.state.config.SHOW_ADMIN_DETAILS, + "ENABLE_SIGNUP": request.app.state.config.ENABLE_SIGNUP, + "DEFAULT_USER_ROLE": request.app.state.config.DEFAULT_USER_ROLE, + "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN, + "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING, + } + + +############################ +# API Key +############################ + + +# create api key +@router.post("/api_key", response_model=ApiKey) +async def create_api_key_(user=Depends(get_current_user)): + api_key = create_api_key() + success = Users.update_user_api_key_by_id(user.id, api_key) + if success: + return { + "api_key": api_key, + } + else: + raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_API_KEY_ERROR) + + +# delete api key +@router.delete("/api_key", response_model=bool) +async def delete_api_key(user=Depends(get_current_user)): + success = Users.update_user_api_key_by_id(user.id, None) + return success + + +# get api key +@router.get("/api_key", response_model=ApiKey) +async def get_api_key(user=Depends(get_current_user)): + api_key = Users.get_user_api_key_by_id(user.id) + if api_key: + return { + "api_key": api_key, + } + else: + raise HTTPException(404, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND) diff --git a/backend/apps/webui/routers/chats.py b/backend/apps/webui/routers/chats.py new file mode 100644 index 0000000000000000000000000000000000000000..80308a451b95a79bc146dd2aa1f3a32181113e21 --- /dev/null +++ b/backend/apps/webui/routers/chats.py @@ -0,0 +1,483 @@ +from fastapi import Depends, Request, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union, Optional +from utils.utils import get_verified_user, get_admin_user +from fastapi import APIRouter +from pydantic import BaseModel +import json +import logging + +from apps.webui.models.users import Users +from apps.webui.models.chats import ( + ChatModel, + ChatResponse, + ChatTitleForm, + ChatForm, + ChatTitleIdResponse, + Chats, +) + + +from apps.webui.models.tags import ( + TagModel, + ChatIdTagModel, + ChatIdTagForm, + ChatTagsResponse, + Tags, +) + +from constants import ERROR_MESSAGES + +from config import SRC_LOG_LEVELS, ENABLE_ADMIN_EXPORT + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +router = APIRouter() + +############################ +# GetChatList +############################ + + +@router.get("/", response_model=List[ChatTitleIdResponse]) +@router.get("/list", response_model=List[ChatTitleIdResponse]) +async def get_session_user_chat_list( + user=Depends(get_verified_user), skip: int = 0, limit: int = 50 +): + return Chats.get_chat_title_id_list_by_user_id(user.id, skip=skip, limit=limit) + + +############################ +# DeleteAllChats 
+############################ + + +@router.delete("/", response_model=bool) +async def delete_all_user_chats(request: Request, user=Depends(get_verified_user)): + + if ( + user.role == "user" + and not request.app.state.config.USER_PERMISSIONS["chat"]["deletion"] + ): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + result = Chats.delete_chats_by_user_id(user.id) + return result + + +############################ +# GetUserChatList +############################ + + +@router.get("/list/user/{user_id}", response_model=List[ChatTitleIdResponse]) +async def get_user_chat_list_by_user_id( + user_id: str, + user=Depends(get_admin_user), + skip: int = 0, + limit: int = 50, +): + return Chats.get_chat_list_by_user_id( + user_id, include_archived=True, skip=skip, limit=limit + ) + + +############################ +# CreateNewChat +############################ + + +@router.post("/new", response_model=Optional[ChatResponse]) +async def create_new_chat(form_data: ChatForm, user=Depends(get_verified_user)): + try: + chat = Chats.insert_new_chat(user.id, form_data) + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT() + ) + + +############################ +# GetChats +############################ + + +@router.get("/all", response_model=List[ChatResponse]) +async def get_user_chats(user=Depends(get_verified_user)): + return [ + ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + for chat in Chats.get_chats_by_user_id(user.id) + ] + + +############################ +# GetArchivedChats +############################ + + +@router.get("/all/archived", response_model=List[ChatResponse]) +async def get_user_archived_chats(user=Depends(get_verified_user)): + return [ + ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + for chat in Chats.get_archived_chats_by_user_id(user.id) + ] + + +############################ +# GetAllChatsInDB +############################ + + +@router.get("/all/db", response_model=List[ChatResponse]) +async def get_all_user_chats_in_db(user=Depends(get_admin_user)): + if not ENABLE_ADMIN_EXPORT: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + return [ + ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + for chat in Chats.get_chats() + ] + + +############################ +# GetArchivedChats +############################ + + +@router.get("/archived", response_model=List[ChatTitleIdResponse]) +async def get_archived_session_user_chat_list( + user=Depends(get_verified_user), skip: int = 0, limit: int = 50 +): + return Chats.get_archived_chat_list_by_user_id(user.id, skip, limit) + + +############################ +# ArchiveAllChats +############################ + + +@router.post("/archive/all", response_model=bool) +async def archive_all_chats(user=Depends(get_verified_user)): + return Chats.archive_all_chats_by_user_id(user.id) + + +############################ +# GetSharedChatById +############################ + + +@router.get("/share/{share_id}", response_model=Optional[ChatResponse]) +async def get_shared_chat_by_id(share_id: str, user=Depends(get_verified_user)): + if user.role == "pending": + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) + + if user.role == "user": + chat = 
Chats.get_chat_by_share_id(share_id) + elif user.role == "admin": + chat = Chats.get_chat_by_id(share_id) + + if chat: + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) + + +############################ +# GetChatsByTags +############################ + + +class TagNameForm(BaseModel): + name: str + skip: Optional[int] = 0 + limit: Optional[int] = 50 + + +@router.post("/tags", response_model=List[ChatTitleIdResponse]) +async def get_user_chat_list_by_tag_name( + form_data: TagNameForm, user=Depends(get_verified_user) +): + + chat_ids = [ + chat_id_tag.chat_id + for chat_id_tag in Tags.get_chat_ids_by_tag_name_and_user_id( + form_data.name, user.id + ) + ] + + chats = Chats.get_chat_list_by_chat_ids(chat_ids, form_data.skip, form_data.limit) + + if len(chats) == 0: + Tags.delete_tag_by_tag_name_and_user_id(form_data.name, user.id) + + return chats + + +############################ +# GetAllTags +############################ + + +@router.get("/tags/all", response_model=List[TagModel]) +async def get_all_tags(user=Depends(get_verified_user)): + try: + tags = Tags.get_tags_by_user_id(user.id) + return tags + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT() + ) + + +############################ +# GetChatById +############################ + + +@router.get("/{id}", response_model=Optional[ChatResponse]) +async def get_chat_by_id(id: str, user=Depends(get_verified_user)): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + + if chat: + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) + + +############################ +# UpdateChatById +############################ + + +@router.post("/{id}", response_model=Optional[ChatResponse]) +async def update_chat_by_id( + id: str, form_data: ChatForm, user=Depends(get_verified_user) +): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + if chat: + updated_chat = {**json.loads(chat.chat), **form_data.chat} + + chat = Chats.update_chat_by_id(id, updated_chat) + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + +############################ +# DeleteChatById +############################ + + +@router.delete("/{id}", response_model=bool) +async def delete_chat_by_id(request: Request, id: str, user=Depends(get_verified_user)): + + if user.role == "admin": + result = Chats.delete_chat_by_id(id) + return result + else: + if not request.app.state.config.USER_PERMISSIONS["chat"]["deletion"]: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + result = Chats.delete_chat_by_id_and_user_id(id, user.id) + return result + + +############################ +# CloneChat +############################ + + +@router.get("/{id}/clone", response_model=Optional[ChatResponse]) +async def clone_chat_by_id(id: str, user=Depends(get_verified_user)): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + if chat: + + chat_body = json.loads(chat.chat) + updated_chat = { + **chat_body, + "originalChatId": chat.id, + "branchPointMessageId": chat_body["history"]["currentId"], + "title": f"Clone of 
{chat.title}", + } + + chat = Chats.insert_new_chat(user.id, ChatForm(**{"chat": updated_chat})) + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.DEFAULT() + ) + + +############################ +# ArchiveChat +############################ + + +@router.get("/{id}/archive", response_model=Optional[ChatResponse]) +async def archive_chat_by_id(id: str, user=Depends(get_verified_user)): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + if chat: + chat = Chats.toggle_chat_archive_by_id(id) + return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.DEFAULT() + ) + + +############################ +# ShareChatById +############################ + + +@router.post("/{id}/share", response_model=Optional[ChatResponse]) +async def share_chat_by_id(id: str, user=Depends(get_verified_user)): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + if chat: + if chat.share_id: + shared_chat = Chats.update_shared_chat_by_chat_id(chat.id) + return ChatResponse( + **{**shared_chat.model_dump(), "chat": json.loads(shared_chat.chat)} + ) + + shared_chat = Chats.insert_shared_chat_by_chat_id(chat.id) + if not shared_chat: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=ERROR_MESSAGES.DEFAULT(), + ) + + return ChatResponse( + **{**shared_chat.model_dump(), "chat": json.loads(shared_chat.chat)} + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + +############################ +# DeletedSharedChatById +############################ + + +@router.delete("/{id}/share", response_model=Optional[bool]) +async def delete_shared_chat_by_id(id: str, user=Depends(get_verified_user)): + chat = Chats.get_chat_by_id_and_user_id(id, user.id) + if chat: + if not chat.share_id: + return False + + result = Chats.delete_shared_chat_by_chat_id(id) + update_result = Chats.update_chat_share_id_by_id(id, None) + + return result and update_result != None + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + +############################ +# GetChatTagsById +############################ + + +@router.get("/{id}/tags", response_model=List[TagModel]) +async def get_chat_tags_by_id(id: str, user=Depends(get_verified_user)): + tags = Tags.get_tags_by_chat_id_and_user_id(id, user.id) + + if tags != None: + return tags + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) + + +############################ +# AddChatTagById +############################ + + +@router.post("/{id}/tags", response_model=Optional[ChatIdTagModel]) +async def add_chat_tag_by_id( + id: str, form_data: ChatIdTagForm, user=Depends(get_verified_user) +): + tags = Tags.get_tags_by_chat_id_and_user_id(id, user.id) + + if form_data.tag_name not in tags: + tag = Tags.add_tag_to_chat(user.id, form_data) + + if tag: + return tag + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.DEFAULT() + ) + + +############################ +# DeleteChatTagById +############################ + + +@router.delete("/{id}/tags", response_model=Optional[bool]) 
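+# Detaches a single tag from the chat; Tags.delete_tag_by_tag_name_and_chat_id_and_user_id
+# also removes the tag record itself once no remaining chats reference it.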
+async def delete_chat_tag_by_id( + id: str, form_data: ChatIdTagForm, user=Depends(get_verified_user) +): + result = Tags.delete_tag_by_tag_name_and_chat_id_and_user_id( + form_data.tag_name, id, user.id + ) + + if result: + return result + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) + + +############################ +# DeleteAllChatTagsById +############################ + + +@router.delete("/{id}/tags/all", response_model=Optional[bool]) +async def delete_all_chat_tags_by_id(id: str, user=Depends(get_verified_user)): + result = Tags.delete_tags_by_chat_id_and_user_id(id, user.id) + + if result: + return result + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND + ) diff --git a/backend/apps/webui/routers/configs.py b/backend/apps/webui/routers/configs.py new file mode 100644 index 0000000000000000000000000000000000000000..39e435013541d40c1f058be8b783595d08515bf4 --- /dev/null +++ b/backend/apps/webui/routers/configs.py @@ -0,0 +1,89 @@ +from fastapi import Response, Request +from fastapi import Depends, FastAPI, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union + +from fastapi import APIRouter +from pydantic import BaseModel +import time +import uuid + +from config import BannerModel + +from apps.webui.models.users import Users + +from utils.utils import ( + get_password_hash, + get_verified_user, + get_admin_user, + create_token, +) +from utils.misc import get_gravatar_url, validate_email_format +from constants import ERROR_MESSAGES + +router = APIRouter() + + +class SetDefaultModelsForm(BaseModel): + models: str + + +class PromptSuggestion(BaseModel): + title: List[str] + content: str + + +class SetDefaultSuggestionsForm(BaseModel): + suggestions: List[PromptSuggestion] + + +############################ +# SetDefaultModels +############################ + + +@router.post("/default/models", response_model=str) +async def set_global_default_models( + request: Request, form_data: SetDefaultModelsForm, user=Depends(get_admin_user) +): + request.app.state.config.DEFAULT_MODELS = form_data.models + return request.app.state.config.DEFAULT_MODELS + + +@router.post("/default/suggestions", response_model=List[PromptSuggestion]) +async def set_global_default_suggestions( + request: Request, + form_data: SetDefaultSuggestionsForm, + user=Depends(get_admin_user), +): + data = form_data.model_dump() + request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS = data["suggestions"] + return request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS + + +############################ +# SetBanners +############################ + + +class SetBannersForm(BaseModel): + banners: List[BannerModel] + + +@router.post("/banners", response_model=List[BannerModel]) +async def set_banners( + request: Request, + form_data: SetBannersForm, + user=Depends(get_admin_user), +): + data = form_data.model_dump() + request.app.state.config.BANNERS = data["banners"] + return request.app.state.config.BANNERS + + +@router.get("/banners", response_model=List[BannerModel]) +async def get_banners( + request: Request, + user=Depends(get_verified_user), +): + return request.app.state.config.BANNERS diff --git a/backend/apps/webui/routers/documents.py b/backend/apps/webui/routers/documents.py new file mode 100644 index 0000000000000000000000000000000000000000..2299b2fee3d5fd8e9a65c15ab564e05f2c256039 --- /dev/null +++ b/backend/apps/webui/routers/documents.py @@ -0,0 +1,160 @@ +from 
fastapi import Depends, FastAPI, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import json + +from apps.webui.models.documents import ( + Documents, + DocumentForm, + DocumentUpdateForm, + DocumentModel, + DocumentResponse, +) + +from utils.utils import get_verified_user, get_admin_user +from constants import ERROR_MESSAGES + +router = APIRouter() + +############################ +# GetDocuments +############################ + + +@router.get("/", response_model=List[DocumentResponse]) +async def get_documents(user=Depends(get_verified_user)): + docs = [ + DocumentResponse( + **{ + **doc.model_dump(), + "content": json.loads(doc.content if doc.content else "{}"), + } + ) + for doc in Documents.get_docs() + ] + return docs + + +############################ +# CreateNewDoc +############################ + + +@router.post("/create", response_model=Optional[DocumentResponse]) +async def create_new_doc(form_data: DocumentForm, user=Depends(get_admin_user)): + doc = Documents.get_doc_by_name(form_data.name) + if doc == None: + doc = Documents.insert_new_doc(user.id, form_data) + + if doc: + return DocumentResponse( + **{ + **doc.model_dump(), + "content": json.loads(doc.content if doc.content else "{}"), + } + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.FILE_EXISTS, + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.NAME_TAG_TAKEN, + ) + + +############################ +# GetDocByName +############################ + + +@router.get("/doc", response_model=Optional[DocumentResponse]) +async def get_doc_by_name(name: str, user=Depends(get_verified_user)): + doc = Documents.get_doc_by_name(name) + + if doc: + return DocumentResponse( + **{ + **doc.model_dump(), + "content": json.loads(doc.content if doc.content else "{}"), + } + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# TagDocByName +############################ + + +class TagItem(BaseModel): + name: str + + +class TagDocumentForm(BaseModel): + name: str + tags: List[dict] + + +@router.post("/doc/tags", response_model=Optional[DocumentResponse]) +async def tag_doc_by_name(form_data: TagDocumentForm, user=Depends(get_verified_user)): + doc = Documents.update_doc_content_by_name(form_data.name, {"tags": form_data.tags}) + + if doc: + return DocumentResponse( + **{ + **doc.model_dump(), + "content": json.loads(doc.content if doc.content else "{}"), + } + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdateDocByName +############################ + + +@router.post("/doc/update", response_model=Optional[DocumentResponse]) +async def update_doc_by_name( + name: str, + form_data: DocumentUpdateForm, + user=Depends(get_admin_user), +): + doc = Documents.update_doc_by_name(name, form_data) + if doc: + return DocumentResponse( + **{ + **doc.model_dump(), + "content": json.loads(doc.content if doc.content else "{}"), + } + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.NAME_TAG_TAKEN, + ) + + +############################ +# DeleteDocByName +############################ + + +@router.delete("/doc/delete", response_model=bool) +async def delete_doc_by_name(name: str, 
user=Depends(get_admin_user)): + result = Documents.delete_doc_by_name(name) + return result diff --git a/backend/apps/webui/routers/files.py b/backend/apps/webui/routers/files.py new file mode 100644 index 0000000000000000000000000000000000000000..99fb923a12ccbb034c80979dc28f8fceec323a51 --- /dev/null +++ b/backend/apps/webui/routers/files.py @@ -0,0 +1,241 @@ +from fastapi import ( + Depends, + FastAPI, + HTTPException, + status, + Request, + UploadFile, + File, + Form, +) + + +from datetime import datetime, timedelta +from typing import List, Union, Optional +from pathlib import Path + +from fastapi import APIRouter +from fastapi.responses import StreamingResponse, JSONResponse, FileResponse + +from pydantic import BaseModel +import json + +from apps.webui.models.files import ( + Files, + FileForm, + FileModel, + FileModelResponse, +) +from utils.utils import get_verified_user, get_admin_user +from constants import ERROR_MESSAGES + +from importlib import util +import os +import uuid +import os, shutil, logging, re + + +from config import SRC_LOG_LEVELS, UPLOAD_DIR + + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + + +router = APIRouter() + +############################ +# Upload File +############################ + + +@router.post("/") +def upload_file(file: UploadFile = File(...), user=Depends(get_verified_user)): + log.info(f"file.content_type: {file.content_type}") + try: + unsanitized_filename = file.filename + filename = os.path.basename(unsanitized_filename) + + # replace filename with uuid + id = str(uuid.uuid4()) + name = filename + filename = f"{id}_{filename}" + file_path = f"{UPLOAD_DIR}/{filename}" + + contents = file.file.read() + with open(file_path, "wb") as f: + f.write(contents) + f.close() + + file = Files.insert_new_file( + user.id, + FileForm( + **{ + "id": id, + "filename": filename, + "meta": { + "name": name, + "content_type": file.content_type, + "size": len(contents), + "path": file_path, + }, + } + ), + ) + + if file: + return file + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error uploading file"), + ) + + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +############################ +# List Files +############################ + + +@router.get("/", response_model=List[FileModel]) +async def list_files(user=Depends(get_verified_user)): + files = Files.get_files() + return files + + +############################ +# Delete All Files +############################ + + +@router.delete("/all") +async def delete_all_files(user=Depends(get_admin_user)): + result = Files.delete_all_files() + + if result: + folder = f"{UPLOAD_DIR}" + try: + # Check if the directory exists + if os.path.exists(folder): + # Iterate over all the files and directories in the specified directory + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) # Remove the file or link + elif os.path.isdir(file_path): + shutil.rmtree(file_path) # Remove the directory + except Exception as e: + print(f"Failed to delete {file_path}. Reason: {e}") + else: + print(f"The directory {folder} does not exist") + except Exception as e: + print(f"Failed to process the directory {folder}. 
Reason: {e}") + + return {"message": "All files deleted successfully"} + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error deleting files"), + ) + + +############################ +# Get File By Id +############################ + + +@router.get("/{id}", response_model=Optional[FileModel]) +async def get_file_by_id(id: str, user=Depends(get_verified_user)): + file = Files.get_file_by_id(id) + + if file: + return file + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# Get File Content By Id +############################ + + +@router.get("/{id}/content", response_model=Optional[FileModel]) +async def get_file_content_by_id(id: str, user=Depends(get_verified_user)): + file = Files.get_file_by_id(id) + + if file: + file_path = Path(file.meta["path"]) + + # Check if the file already exists in the cache + if file_path.is_file(): + print(f"file_path: {file_path}") + return FileResponse(file_path) + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +@router.get("/{id}/content/{file_name}", response_model=Optional[FileModel]) +async def get_file_content_by_id(id: str, user=Depends(get_verified_user)): + file = Files.get_file_by_id(id) + + if file: + file_path = Path(file.meta["path"]) + + # Check if the file already exists in the cache + if file_path.is_file(): + print(f"file_path: {file_path}") + return FileResponse(file_path) + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# Delete File By Id +############################ + + +@router.delete("/{id}") +async def delete_file_by_id(id: str, user=Depends(get_verified_user)): + file = Files.get_file_by_id(id) + + if file: + result = Files.delete_file_by_id(id) + if result: + return {"message": "File deleted successfully"} + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error deleting file"), + ) + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=ERROR_MESSAGES.NOT_FOUND, + ) diff --git a/backend/apps/webui/routers/functions.py b/backend/apps/webui/routers/functions.py new file mode 100644 index 0000000000000000000000000000000000000000..eb5216b202b0858b87cfcda16fc1ed9e4c3cd96c --- /dev/null +++ b/backend/apps/webui/routers/functions.py @@ -0,0 +1,426 @@ +from fastapi import Depends, FastAPI, HTTPException, status, Request +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import json + +from apps.webui.models.functions import ( + Functions, + FunctionForm, + FunctionModel, + FunctionResponse, +) +from apps.webui.utils import load_function_module_by_id +from utils.utils import get_verified_user, get_admin_user +from constants import ERROR_MESSAGES + +from importlib import util +import os +from pathlib import Path + +from config import DATA_DIR, CACHE_DIR, FUNCTIONS_DIR + + +router = APIRouter() + +############################ +# GetFunctions +############################ + + +@router.get("/", response_model=List[FunctionResponse]) +async 
def get_functions(user=Depends(get_verified_user)): + return Functions.get_functions() + + +############################ +# ExportFunctions +############################ + + +@router.get("/export", response_model=List[FunctionModel]) +async def get_functions(user=Depends(get_admin_user)): + return Functions.get_functions() + + +############################ +# CreateNewFunction +############################ + + +@router.post("/create", response_model=Optional[FunctionResponse]) +async def create_new_function( + request: Request, form_data: FunctionForm, user=Depends(get_admin_user) +): + if not form_data.id.isidentifier(): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Only alphanumeric characters and underscores are allowed in the id", + ) + + form_data.id = form_data.id.lower() + + function = Functions.get_function_by_id(form_data.id) + if function == None: + function_path = os.path.join(FUNCTIONS_DIR, f"{form_data.id}.py") + try: + with open(function_path, "w") as function_file: + function_file.write(form_data.content) + + function_module, function_type, frontmatter = load_function_module_by_id( + form_data.id + ) + form_data.meta.manifest = frontmatter + + FUNCTIONS = request.app.state.FUNCTIONS + FUNCTIONS[form_data.id] = function_module + + function = Functions.insert_new_function(user.id, function_type, form_data) + + function_cache_dir = Path(CACHE_DIR) / "functions" / form_data.id + function_cache_dir.mkdir(parents=True, exist_ok=True) + + if function: + return function + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error creating function"), + ) + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.ID_TAKEN, + ) + + +############################ +# GetFunctionById +############################ + + +@router.get("/id/{id}", response_model=Optional[FunctionModel]) +async def get_function_by_id(id: str, user=Depends(get_admin_user)): + function = Functions.get_function_by_id(id) + + if function: + return function + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# ToggleFunctionById +############################ + + +@router.post("/id/{id}/toggle", response_model=Optional[FunctionModel]) +async def toggle_function_by_id(id: str, user=Depends(get_admin_user)): + function = Functions.get_function_by_id(id) + if function: + function = Functions.update_function_by_id( + id, {"is_active": not function.is_active} + ) + + if function: + return function + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error updating function"), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# ToggleGlobalById +############################ + + +@router.post("/id/{id}/toggle/global", response_model=Optional[FunctionModel]) +async def toggle_global_by_id(id: str, user=Depends(get_admin_user)): + function = Functions.get_function_by_id(id) + if function: + function = Functions.update_function_by_id( + id, {"is_global": not function.is_global} + ) + + if function: + return function + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + 
detail=ERROR_MESSAGES.DEFAULT("Error updating function"), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdateFunctionById +############################ + + +@router.post("/id/{id}/update", response_model=Optional[FunctionModel]) +async def update_function_by_id( + request: Request, id: str, form_data: FunctionForm, user=Depends(get_admin_user) +): + function_path = os.path.join(FUNCTIONS_DIR, f"{id}.py") + + try: + with open(function_path, "w") as function_file: + function_file.write(form_data.content) + + function_module, function_type, frontmatter = load_function_module_by_id(id) + form_data.meta.manifest = frontmatter + + FUNCTIONS = request.app.state.FUNCTIONS + FUNCTIONS[id] = function_module + + updated = {**form_data.model_dump(exclude={"id"}), "type": function_type} + print(updated) + + function = Functions.update_function_by_id(id, updated) + + if function: + return function + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error updating function"), + ) + + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +############################ +# DeleteFunctionById +############################ + + +@router.delete("/id/{id}/delete", response_model=bool) +async def delete_function_by_id( + request: Request, id: str, user=Depends(get_admin_user) +): + result = Functions.delete_function_by_id(id) + + if result: + FUNCTIONS = request.app.state.FUNCTIONS + if id in FUNCTIONS: + del FUNCTIONS[id] + + # delete the function file + function_path = os.path.join(FUNCTIONS_DIR, f"{id}.py") + try: + os.remove(function_path) + except: + pass + + return result + + +############################ +# GetFunctionValves +############################ + + +@router.get("/id/{id}/valves", response_model=Optional[dict]) +async def get_function_valves_by_id(id: str, user=Depends(get_admin_user)): + function = Functions.get_function_by_id(id) + if function: + try: + valves = Functions.get_function_valves_by_id(id) + return valves + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# GetFunctionValvesSpec +############################ + + +@router.get("/id/{id}/valves/spec", response_model=Optional[dict]) +async def get_function_valves_spec_by_id( + request: Request, id: str, user=Depends(get_admin_user) +): + function = Functions.get_function_by_id(id) + if function: + if id in request.app.state.FUNCTIONS: + function_module = request.app.state.FUNCTIONS[id] + else: + function_module, function_type, frontmatter = load_function_module_by_id(id) + request.app.state.FUNCTIONS[id] = function_module + + if hasattr(function_module, "Valves"): + Valves = function_module.Valves + return Valves.schema() + return None + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdateFunctionValves +############################ + + +@router.post("/id/{id}/valves/update", response_model=Optional[dict]) +async def update_function_valves_by_id( + request: Request, id: str, form_data: dict, user=Depends(get_admin_user) +): + function = Functions.get_function_by_id(id) + if 
function: + + if id in request.app.state.FUNCTIONS: + function_module = request.app.state.FUNCTIONS[id] + else: + function_module, function_type, frontmatter = load_function_module_by_id(id) + request.app.state.FUNCTIONS[id] = function_module + + if hasattr(function_module, "Valves"): + Valves = function_module.Valves + + try: + form_data = {k: v for k, v in form_data.items() if v is not None} + valves = Valves(**form_data) + Functions.update_function_valves_by_id(id, valves.model_dump()) + return valves.model_dump() + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# FunctionUserValves +############################ + + +@router.get("/id/{id}/valves/user", response_model=Optional[dict]) +async def get_function_user_valves_by_id(id: str, user=Depends(get_verified_user)): + function = Functions.get_function_by_id(id) + if function: + try: + user_valves = Functions.get_user_valves_by_id_and_user_id(id, user.id) + return user_valves + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +@router.get("/id/{id}/valves/user/spec", response_model=Optional[dict]) +async def get_function_user_valves_spec_by_id( + request: Request, id: str, user=Depends(get_verified_user) +): + function = Functions.get_function_by_id(id) + if function: + if id in request.app.state.FUNCTIONS: + function_module = request.app.state.FUNCTIONS[id] + else: + function_module, function_type, frontmatter = load_function_module_by_id(id) + request.app.state.FUNCTIONS[id] = function_module + + if hasattr(function_module, "UserValves"): + UserValves = function_module.UserValves + return UserValves.schema() + return None + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +@router.post("/id/{id}/valves/user/update", response_model=Optional[dict]) +async def update_function_user_valves_by_id( + request: Request, id: str, form_data: dict, user=Depends(get_verified_user) +): + function = Functions.get_function_by_id(id) + + if function: + if id in request.app.state.FUNCTIONS: + function_module = request.app.state.FUNCTIONS[id] + else: + function_module, function_type, frontmatter = load_function_module_by_id(id) + request.app.state.FUNCTIONS[id] = function_module + + if hasattr(function_module, "UserValves"): + UserValves = function_module.UserValves + + try: + form_data = {k: v for k, v in form_data.items() if v is not None} + user_valves = UserValves(**form_data) + Functions.update_user_valves_by_id_and_user_id( + id, user.id, user_valves.model_dump() + ) + return user_valves.model_dump() + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) diff --git a/backend/apps/webui/routers/memories.py 
b/backend/apps/webui/routers/memories.py new file mode 100644 index 0000000000000000000000000000000000000000..2c473ebe8f6389f3512841f0020abab5d4dc9187 --- /dev/null +++ b/backend/apps/webui/routers/memories.py @@ -0,0 +1,180 @@ +from fastapi import Response, Request +from fastapi import Depends, FastAPI, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import logging + +from apps.webui.models.memories import Memories, MemoryModel + +from utils.utils import get_verified_user +from constants import ERROR_MESSAGES + +from config import SRC_LOG_LEVELS, CHROMA_CLIENT + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +router = APIRouter() + + +@router.get("/ef") +async def get_embeddings(request: Request): + return {"result": request.app.state.EMBEDDING_FUNCTION("hello world")} + + +############################ +# GetMemories +############################ + + +@router.get("/", response_model=List[MemoryModel]) +async def get_memories(user=Depends(get_verified_user)): + return Memories.get_memories_by_user_id(user.id) + + +############################ +# AddMemory +############################ + + +class AddMemoryForm(BaseModel): + content: str + + +class MemoryUpdateModel(BaseModel): + content: Optional[str] = None + + +@router.post("/add", response_model=Optional[MemoryModel]) +async def add_memory( + request: Request, + form_data: AddMemoryForm, + user=Depends(get_verified_user), +): + memory = Memories.insert_new_memory(user.id, form_data.content) + memory_embedding = request.app.state.EMBEDDING_FUNCTION(memory.content) + + collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}") + collection.upsert( + documents=[memory.content], + ids=[memory.id], + embeddings=[memory_embedding], + metadatas=[{"created_at": memory.created_at}], + ) + + return memory + + +@router.post("/{memory_id}/update", response_model=Optional[MemoryModel]) +async def update_memory_by_id( + memory_id: str, + request: Request, + form_data: MemoryUpdateModel, + user=Depends(get_verified_user), +): + memory = Memories.update_memory_by_id(memory_id, form_data.content) + if memory is None: + raise HTTPException(status_code=404, detail="Memory not found") + + if form_data.content is not None: + memory_embedding = request.app.state.EMBEDDING_FUNCTION(form_data.content) + collection = CHROMA_CLIENT.get_or_create_collection( + name=f"user-memory-{user.id}" + ) + collection.upsert( + documents=[form_data.content], + ids=[memory.id], + embeddings=[memory_embedding], + metadatas=[ + {"created_at": memory.created_at, "updated_at": memory.updated_at} + ], + ) + + return memory + + +############################ +# QueryMemory +############################ + + +class QueryMemoryForm(BaseModel): + content: str + k: Optional[int] = 1 + + +@router.post("/query") +async def query_memory( + request: Request, form_data: QueryMemoryForm, user=Depends(get_verified_user) +): + query_embedding = request.app.state.EMBEDDING_FUNCTION(form_data.content) + collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}") + + results = collection.query( + query_embeddings=[query_embedding], + n_results=form_data.k, # how many results to return + ) + + return results + + +############################ +# ResetMemoryFromVectorDB +############################ +@router.get("/reset", response_model=bool) +async def reset_memory_from_vector_db( + request: Request, 
user=Depends(get_verified_user) +): + CHROMA_CLIENT.delete_collection(f"user-memory-{user.id}") + collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}") + + memories = Memories.get_memories_by_user_id(user.id) + for memory in memories: + memory_embedding = request.app.state.EMBEDDING_FUNCTION(memory.content) + collection.upsert( + documents=[memory.content], + ids=[memory.id], + embeddings=[memory_embedding], + ) + return True + + +############################ +# DeleteMemoriesByUserId +############################ + + +@router.delete("/user", response_model=bool) +async def delete_memory_by_user_id(user=Depends(get_verified_user)): + result = Memories.delete_memories_by_user_id(user.id) + + if result: + try: + CHROMA_CLIENT.delete_collection(f"user-memory-{user.id}") + except Exception as e: + log.error(e) + return True + + return False + + +############################ +# DeleteMemoryById +############################ + + +@router.delete("/{memory_id}", response_model=bool) +async def delete_memory_by_id(memory_id: str, user=Depends(get_verified_user)): + result = Memories.delete_memory_by_id_and_user_id(memory_id, user.id) + + if result: + collection = CHROMA_CLIENT.get_or_create_collection( + name=f"user-memory-{user.id}" + ) + collection.delete(ids=[memory_id]) + return True + + return False diff --git a/backend/apps/webui/routers/models.py b/backend/apps/webui/routers/models.py new file mode 100644 index 0000000000000000000000000000000000000000..eeae9e1c41a7608b5fb256bb46d9e5de25af1738 --- /dev/null +++ b/backend/apps/webui/routers/models.py @@ -0,0 +1,113 @@ +from fastapi import Depends, FastAPI, HTTPException, status, Request +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import json + +from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse + +from utils.utils import get_verified_user, get_admin_user +from constants import ERROR_MESSAGES + +router = APIRouter() + +########################### +# getModels +########################### + + +@router.get("/", response_model=List[ModelResponse]) +async def get_models(user=Depends(get_verified_user)): + return Models.get_all_models() + + +############################ +# AddNewModel +############################ + + +@router.post("/add", response_model=Optional[ModelModel]) +async def add_new_model( + request: Request, + form_data: ModelForm, + user=Depends(get_admin_user), +): + if form_data.id in request.app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.MODEL_ID_TAKEN, + ) + else: + model = Models.insert_new_model(form_data, user.id) + + if model: + return model + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.DEFAULT(), + ) + + +############################ +# GetModelById +############################ + + +@router.get("/", response_model=Optional[ModelModel]) +async def get_model_by_id(id: str, user=Depends(get_verified_user)): + model = Models.get_model_by_id(id) + + if model: + return model + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdateModelById +############################ + + +@router.post("/update", response_model=Optional[ModelModel]) +async def update_model_by_id( + request: Request, + id: str, + form_data: ModelForm, + user=Depends(get_admin_user), +): + 
model = Models.get_model_by_id(id) + if model: + model = Models.update_model_by_id(id, form_data) + return model + else: + if form_data.id in request.app.state.MODELS: + model = Models.insert_new_model(form_data, user.id) + if model: + return model + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.DEFAULT(), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.DEFAULT(), + ) + + +############################ +# DeleteModelById +############################ + + +@router.delete("/delete", response_model=bool) +async def delete_model_by_id(id: str, user=Depends(get_admin_user)): + result = Models.delete_model_by_id(id) + return result diff --git a/backend/apps/webui/routers/prompts.py b/backend/apps/webui/routers/prompts.py new file mode 100644 index 0000000000000000000000000000000000000000..c674590e95998979f8f7749901fa9a699b9cb8f6 --- /dev/null +++ b/backend/apps/webui/routers/prompts.py @@ -0,0 +1,96 @@ +from fastapi import Depends, FastAPI, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import json + +from apps.webui.models.prompts import Prompts, PromptForm, PromptModel + +from utils.utils import get_verified_user, get_admin_user +from constants import ERROR_MESSAGES + +router = APIRouter() + +############################ +# GetPrompts +############################ + + +@router.get("/", response_model=List[PromptModel]) +async def get_prompts(user=Depends(get_verified_user)): + return Prompts.get_prompts() + + +############################ +# CreateNewPrompt +############################ + + +@router.post("/create", response_model=Optional[PromptModel]) +async def create_new_prompt(form_data: PromptForm, user=Depends(get_admin_user)): + prompt = Prompts.get_prompt_by_command(form_data.command) + if prompt == None: + prompt = Prompts.insert_new_prompt(user.id, form_data) + + if prompt: + return prompt + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(), + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.COMMAND_TAKEN, + ) + + +############################ +# GetPromptByCommand +############################ + + +@router.get("/command/{command}", response_model=Optional[PromptModel]) +async def get_prompt_by_command(command: str, user=Depends(get_verified_user)): + prompt = Prompts.get_prompt_by_command(f"/{command}") + + if prompt: + return prompt + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdatePromptByCommand +############################ + + +@router.post("/command/{command}/update", response_model=Optional[PromptModel]) +async def update_prompt_by_command( + command: str, + form_data: PromptForm, + user=Depends(get_admin_user), +): + prompt = Prompts.update_prompt_by_command(f"/{command}", form_data) + if prompt: + return prompt + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + + +############################ +# DeletePromptByCommand +############################ + + +@router.delete("/command/{command}/delete", response_model=bool) +async def delete_prompt_by_command(command: str, user=Depends(get_admin_user)): + result = Prompts.delete_prompt_by_command(f"/{command}") + return result diff --git 
a/backend/apps/webui/routers/tools.py b/backend/apps/webui/routers/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..ea9db8180b4276bacf03c60a0a624eab80b8e677 --- /dev/null +++ b/backend/apps/webui/routers/tools.py @@ -0,0 +1,379 @@ +from fastapi import Depends, FastAPI, HTTPException, status, Request +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import json + +from apps.webui.models.users import Users +from apps.webui.models.tools import Tools, ToolForm, ToolModel, ToolResponse +from apps.webui.utils import load_toolkit_module_by_id + +from utils.utils import get_admin_user, get_verified_user +from utils.tools import get_tools_specs +from constants import ERROR_MESSAGES + +from importlib import util +import os +from pathlib import Path + +from config import DATA_DIR, CACHE_DIR + + +TOOLS_DIR = f"{DATA_DIR}/tools" +os.makedirs(TOOLS_DIR, exist_ok=True) + + +router = APIRouter() + +############################ +# GetToolkits +############################ + + +@router.get("/", response_model=List[ToolResponse]) +async def get_toolkits(user=Depends(get_verified_user)): + toolkits = [toolkit for toolkit in Tools.get_tools()] + return toolkits + + +############################ +# ExportToolKits +############################ + + +@router.get("/export", response_model=List[ToolModel]) +async def get_toolkits(user=Depends(get_admin_user)): + toolkits = [toolkit for toolkit in Tools.get_tools()] + return toolkits + + +############################ +# CreateNewToolKit +############################ + + +@router.post("/create", response_model=Optional[ToolResponse]) +async def create_new_toolkit( + request: Request, + form_data: ToolForm, + user=Depends(get_admin_user), +): + if not form_data.id.isidentifier(): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Only alphanumeric characters and underscores are allowed in the id", + ) + + form_data.id = form_data.id.lower() + + toolkit = Tools.get_tool_by_id(form_data.id) + if toolkit == None: + toolkit_path = os.path.join(TOOLS_DIR, f"{form_data.id}.py") + try: + with open(toolkit_path, "w") as tool_file: + tool_file.write(form_data.content) + + toolkit_module, frontmatter = load_toolkit_module_by_id(form_data.id) + form_data.meta.manifest = frontmatter + + TOOLS = request.app.state.TOOLS + TOOLS[form_data.id] = toolkit_module + + specs = get_tools_specs(TOOLS[form_data.id]) + toolkit = Tools.insert_new_tool(user.id, form_data, specs) + + tool_cache_dir = Path(CACHE_DIR) / "tools" / form_data.id + tool_cache_dir.mkdir(parents=True, exist_ok=True) + + if toolkit: + return toolkit + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error creating toolkit"), + ) + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.ID_TAKEN, + ) + + +############################ +# GetToolkitById +############################ + + +@router.get("/id/{id}", response_model=Optional[ToolModel]) +async def get_toolkit_by_id(id: str, user=Depends(get_admin_user)): + toolkit = Tools.get_tool_by_id(id) + + if toolkit: + return toolkit + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# 
UpdateToolkitById +############################ + + +@router.post("/id/{id}/update", response_model=Optional[ToolModel]) +async def update_toolkit_by_id( + request: Request, + id: str, + form_data: ToolForm, + user=Depends(get_admin_user), +): + toolkit_path = os.path.join(TOOLS_DIR, f"{id}.py") + + try: + with open(toolkit_path, "w") as tool_file: + tool_file.write(form_data.content) + + toolkit_module, frontmatter = load_toolkit_module_by_id(id) + form_data.meta.manifest = frontmatter + + TOOLS = request.app.state.TOOLS + TOOLS[id] = toolkit_module + + specs = get_tools_specs(TOOLS[id]) + + updated = { + **form_data.model_dump(exclude={"id"}), + "specs": specs, + } + + print(updated) + toolkit = Tools.update_tool_by_id(id, updated) + + if toolkit: + return toolkit + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT("Error updating toolkit"), + ) + + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + +############################ +# DeleteToolkitById +############################ + + +@router.delete("/id/{id}/delete", response_model=bool) +async def delete_toolkit_by_id(request: Request, id: str, user=Depends(get_admin_user)): + result = Tools.delete_tool_by_id(id) + + if result: + TOOLS = request.app.state.TOOLS + if id in TOOLS: + del TOOLS[id] + + # delete the toolkit file + toolkit_path = os.path.join(TOOLS_DIR, f"{id}.py") + os.remove(toolkit_path) + + return result + + +############################ +# GetToolValves +############################ + + +@router.get("/id/{id}/valves", response_model=Optional[dict]) +async def get_toolkit_valves_by_id(id: str, user=Depends(get_admin_user)): + toolkit = Tools.get_tool_by_id(id) + if toolkit: + try: + valves = Tools.get_tool_valves_by_id(id) + return valves + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# GetToolValvesSpec +############################ + + +@router.get("/id/{id}/valves/spec", response_model=Optional[dict]) +async def get_toolkit_valves_spec_by_id( + request: Request, id: str, user=Depends(get_admin_user) +): + toolkit = Tools.get_tool_by_id(id) + if toolkit: + if id in request.app.state.TOOLS: + toolkit_module = request.app.state.TOOLS[id] + else: + toolkit_module, frontmatter = load_toolkit_module_by_id(id) + request.app.state.TOOLS[id] = toolkit_module + + if hasattr(toolkit_module, "Valves"): + Valves = toolkit_module.Valves + return Valves.schema() + return None + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# UpdateToolValves +############################ + + +@router.post("/id/{id}/valves/update", response_model=Optional[dict]) +async def update_toolkit_valves_by_id( + request: Request, id: str, form_data: dict, user=Depends(get_admin_user) +): + toolkit = Tools.get_tool_by_id(id) + if toolkit: + if id in request.app.state.TOOLS: + toolkit_module = request.app.state.TOOLS[id] + else: + toolkit_module, frontmatter = load_toolkit_module_by_id(id) + request.app.state.TOOLS[id] = toolkit_module + + if hasattr(toolkit_module, "Valves"): + Valves = toolkit_module.Valves + + try: + form_data = {k: v for k, v in form_data.items() if v is not None} + valves = 
Valves(**form_data) + Tools.update_tool_valves_by_id(id, valves.model_dump()) + return valves.model_dump() + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +############################ +# ToolUserValves +############################ + + +@router.get("/id/{id}/valves/user", response_model=Optional[dict]) +async def get_toolkit_user_valves_by_id(id: str, user=Depends(get_verified_user)): + toolkit = Tools.get_tool_by_id(id) + if toolkit: + try: + user_valves = Tools.get_user_valves_by_id_and_user_id(id, user.id) + return user_valves + except Exception as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +@router.get("/id/{id}/valves/user/spec", response_model=Optional[dict]) +async def get_toolkit_user_valves_spec_by_id( + request: Request, id: str, user=Depends(get_verified_user) +): + toolkit = Tools.get_tool_by_id(id) + if toolkit: + if id in request.app.state.TOOLS: + toolkit_module = request.app.state.TOOLS[id] + else: + toolkit_module, frontmatter = load_toolkit_module_by_id(id) + request.app.state.TOOLS[id] = toolkit_module + + if hasattr(toolkit_module, "UserValves"): + UserValves = toolkit_module.UserValves + return UserValves.schema() + return None + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + + +@router.post("/id/{id}/valves/user/update", response_model=Optional[dict]) +async def update_toolkit_user_valves_by_id( + request: Request, id: str, form_data: dict, user=Depends(get_verified_user) +): + toolkit = Tools.get_tool_by_id(id) + + if toolkit: + if id in request.app.state.TOOLS: + toolkit_module = request.app.state.TOOLS[id] + else: + toolkit_module, frontmatter = load_toolkit_module_by_id(id) + request.app.state.TOOLS[id] = toolkit_module + + if hasattr(toolkit_module, "UserValves"): + UserValves = toolkit_module.UserValves + + try: + form_data = {k: v for k, v in form_data.items() if v is not None} + user_valves = UserValves(**form_data) + Tools.update_user_valves_by_id_and_user_id( + id, user.id, user_valves.model_dump() + ) + return user_valves.model_dump() + except Exception as e: + print(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.NOT_FOUND, + ) diff --git a/backend/apps/webui/routers/users.py b/backend/apps/webui/routers/users.py new file mode 100644 index 0000000000000000000000000000000000000000..9627f0b06779bd2486ee41cb846583ec613ae3c1 --- /dev/null +++ b/backend/apps/webui/routers/users.py @@ -0,0 +1,273 @@ +from fastapi import Response, Request +from fastapi import Depends, FastAPI, HTTPException, status +from datetime import datetime, timedelta +from typing import List, Union, Optional + +from fastapi import APIRouter +from pydantic import BaseModel +import time +import uuid +import logging + +from apps.webui.models.users import ( 
+ UserModel, + UserUpdateForm, + UserRoleUpdateForm, + UserSettings, + Users, +) +from apps.webui.models.auths import Auths +from apps.webui.models.chats import Chats + +from utils.utils import ( + get_verified_user, + get_password_hash, + get_current_user, + get_admin_user, +) +from constants import ERROR_MESSAGES + +from config import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) + +router = APIRouter() + +############################ +# GetUsers +############################ + + +@router.get("/", response_model=List[UserModel]) +async def get_users(skip: int = 0, limit: int = 50, user=Depends(get_admin_user)): + return Users.get_users(skip, limit) + + +############################ +# User Permissions +############################ + + +@router.get("/permissions/user") +async def get_user_permissions(request: Request, user=Depends(get_admin_user)): + return request.app.state.config.USER_PERMISSIONS + + +@router.post("/permissions/user") +async def update_user_permissions( + request: Request, form_data: dict, user=Depends(get_admin_user) +): + request.app.state.config.USER_PERMISSIONS = form_data + return request.app.state.config.USER_PERMISSIONS + + +############################ +# UpdateUserRole +############################ + + +@router.post("/update/role", response_model=Optional[UserModel]) +async def update_user_role(form_data: UserRoleUpdateForm, user=Depends(get_admin_user)): + + if user.id != form_data.id and form_data.id != Users.get_first_user().id: + return Users.update_user_role_by_id(form_data.id, form_data.role) + + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=ERROR_MESSAGES.ACTION_PROHIBITED, + ) + + +############################ +# GetUserSettingsBySessionUser +############################ + + +@router.get("/user/settings", response_model=Optional[UserSettings]) +async def get_user_settings_by_session_user(user=Depends(get_verified_user)): + user = Users.get_user_by_id(user.id) + if user: + return user.settings + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# UpdateUserSettingsBySessionUser +############################ + + +@router.post("/user/settings/update", response_model=UserSettings) +async def update_user_settings_by_session_user( + form_data: UserSettings, user=Depends(get_verified_user) +): + user = Users.update_user_by_id(user.id, {"settings": form_data.model_dump()}) + if user: + return user.settings + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# GetUserInfoBySessionUser +############################ + + +@router.get("/user/info", response_model=Optional[dict]) +async def get_user_info_by_session_user(user=Depends(get_verified_user)): + user = Users.get_user_by_id(user.id) + if user: + return user.info + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# UpdateUserInfoBySessionUser +############################ + + +@router.post("/user/info/update", response_model=Optional[dict]) +async def update_user_info_by_session_user( + form_data: dict, user=Depends(get_verified_user) +): + user = Users.get_user_by_id(user.id) + if user: + if user.info is None: + user.info = {} + + user = Users.update_user_by_id(user.id, {"info": {**user.info, **form_data}}) + if user: + return user.info + 
else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# GetUserById +############################ + + +class UserResponse(BaseModel): + name: str + profile_image_url: str + + +@router.get("/{user_id}", response_model=UserResponse) +async def get_user_by_id(user_id: str, user=Depends(get_verified_user)): + + # Check if user_id is a shared chat + # If it is, get the user_id from the chat + if user_id.startswith("shared-"): + chat_id = user_id.replace("shared-", "") + chat = Chats.get_chat_by_id(chat_id) + if chat: + user_id = chat.user_id + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + user = Users.get_user_by_id(user_id) + + if user: + return UserResponse(name=user.name, profile_image_url=user.profile_image_url) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# UpdateUserById +############################ + + +@router.post("/{user_id}/update", response_model=Optional[UserModel]) +async def update_user_by_id( + user_id: str, + form_data: UserUpdateForm, + session_user=Depends(get_admin_user), +): + user = Users.get_user_by_id(user_id) + + if user: + if form_data.email.lower() != user.email: + email_user = Users.get_user_by_email(form_data.email.lower()) + if email_user: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.EMAIL_TAKEN, + ) + + if form_data.password: + hashed = get_password_hash(form_data.password) + log.debug(f"hashed: {hashed}") + Auths.update_user_password_by_id(user_id, hashed) + + Auths.update_email_by_id(user_id, form_data.email.lower()) + updated_user = Users.update_user_by_id( + user_id, + { + "name": form_data.name, + "email": form_data.email.lower(), + "profile_image_url": form_data.profile_image_url, + }, + ) + + if updated_user: + return updated_user + + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(), + ) + + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.USER_NOT_FOUND, + ) + + +############################ +# DeleteUserById +############################ + + +@router.delete("/{user_id}", response_model=bool) +async def delete_user_by_id(user_id: str, user=Depends(get_admin_user)): + if user.id != user_id: + result = Auths.delete_auth_by_id(user_id) + + if result: + return True + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=ERROR_MESSAGES.DELETE_USER_ERROR, + ) + + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=ERROR_MESSAGES.ACTION_PROHIBITED, + ) diff --git a/backend/apps/webui/routers/utils.py b/backend/apps/webui/routers/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..780ed6b43e71b7c2fc0560d4c4ead5e4f47f81e6 --- /dev/null +++ b/backend/apps/webui/routers/utils.py @@ -0,0 +1,135 @@ +from fastapi import APIRouter, UploadFile, File, Response +from fastapi import Depends, HTTPException, status +from starlette.responses import StreamingResponse, FileResponse +from pydantic import BaseModel + + +from fpdf import FPDF +import markdown +import black + + +from utils.utils import get_admin_user +from utils.misc import calculate_sha256, get_gravatar_url + +from 
config import OLLAMA_BASE_URLS, DATA_DIR, UPLOAD_DIR, ENABLE_ADMIN_EXPORT +from constants import ERROR_MESSAGES +from typing import List + +router = APIRouter() + + +@router.get("/gravatar") +async def get_gravatar( + email: str, +): + return get_gravatar_url(email) + + +class CodeFormatRequest(BaseModel): + code: str + + +@router.post("/code/format") +async def format_code(request: CodeFormatRequest): + try: + formatted_code = black.format_str(request.code, mode=black.Mode()) + return {"code": formatted_code} + except black.NothingChanged: + return {"code": request.code} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +class MarkdownForm(BaseModel): + md: str + + +@router.post("/markdown") +async def get_html_from_markdown( + form_data: MarkdownForm, +): + return {"html": markdown.markdown(form_data.md)} + + +class ChatForm(BaseModel): + title: str + messages: List[dict] + + +@router.post("/pdf") +async def download_chat_as_pdf( + form_data: ChatForm, +): + pdf = FPDF() + pdf.add_page() + + STATIC_DIR = "./static" + FONTS_DIR = f"{STATIC_DIR}/fonts" + + pdf.add_font("NotoSans", "", f"{FONTS_DIR}/NotoSans-Regular.ttf") + pdf.add_font("NotoSans", "b", f"{FONTS_DIR}/NotoSans-Bold.ttf") + pdf.add_font("NotoSans", "i", f"{FONTS_DIR}/NotoSans-Italic.ttf") + pdf.add_font("NotoSansKR", "", f"{FONTS_DIR}/NotoSansKR-Regular.ttf") + pdf.add_font("NotoSansJP", "", f"{FONTS_DIR}/NotoSansJP-Regular.ttf") + + pdf.set_font("NotoSans", size=12) + pdf.set_fallback_fonts(["NotoSansKR", "NotoSansJP"]) + + pdf.set_auto_page_break(auto=True, margin=15) + + # Adjust the effective page width for multi_cell + effective_page_width = ( + pdf.w - 2 * pdf.l_margin - 10 + ) # Subtracted an additional 10 for extra padding + + # Add chat messages + for message in form_data.messages: + role = message["role"] + content = message["content"] + pdf.set_font("NotoSans", "B", size=14) # Bold for the role + pdf.multi_cell(effective_page_width, 10, f"{role.upper()}", 0, "L") + pdf.ln(1) # Extra space between messages + + pdf.set_font("NotoSans", size=10) # Regular for content + pdf.multi_cell(effective_page_width, 6, content, 0, "L") + pdf.ln(1.5) # Extra space between messages + + # Save the pdf with name .pdf + pdf_bytes = pdf.output() + + return Response( + content=bytes(pdf_bytes), + media_type="application/pdf", + headers={"Content-Disposition": f"attachment;filename=chat.pdf"}, + ) + + +@router.get("/db/download") +async def download_db(user=Depends(get_admin_user)): + if not ENABLE_ADMIN_EXPORT: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + from apps.webui.internal.db import engine + + if engine.name != "sqlite": + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DB_NOT_SQLITE, + ) + return FileResponse( + engine.url.database, + media_type="application/octet-stream", + filename="webui.db", + ) + + +@router.get("/litellm/config") +async def download_litellm_config_yaml(user=Depends(get_admin_user)): + return FileResponse( + f"{DATA_DIR}/litellm/config.yaml", + media_type="application/octet-stream", + filename="config.yaml", + ) diff --git a/backend/apps/webui/utils.py b/backend/apps/webui/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..96d2b29ebfa67a5d2d27feb43b8434041cbc04e2 --- /dev/null +++ b/backend/apps/webui/utils.py @@ -0,0 +1,90 @@ +from importlib import util +import os +import re + +from config import TOOLS_DIR, FUNCTIONS_DIR + + +def 
extract_frontmatter(file_path): + """ + Extract frontmatter as a dictionary from the specified file path. + """ + frontmatter = {} + frontmatter_started = False + frontmatter_ended = False + frontmatter_pattern = re.compile(r"^\s*([a-z_]+):\s*(.*)\s*$", re.IGNORECASE) + + try: + with open(file_path, "r", encoding="utf-8") as file: + first_line = file.readline() + if first_line.strip() != '"""': + # The file doesn't start with triple quotes + return {} + + frontmatter_started = True + + for line in file: + if '"""' in line: + if frontmatter_started: + frontmatter_ended = True + break + + if frontmatter_started and not frontmatter_ended: + match = frontmatter_pattern.match(line) + if match: + key, value = match.groups() + frontmatter[key.strip()] = value.strip() + + except FileNotFoundError: + print(f"Error: The file {file_path} does not exist.") + return {} + except Exception as e: + print(f"An error occurred: {e}") + return {} + + return frontmatter + + +def load_toolkit_module_by_id(toolkit_id): + toolkit_path = os.path.join(TOOLS_DIR, f"{toolkit_id}.py") + spec = util.spec_from_file_location(toolkit_id, toolkit_path) + module = util.module_from_spec(spec) + frontmatter = extract_frontmatter(toolkit_path) + + try: + spec.loader.exec_module(module) + print(f"Loaded module: {module.__name__}") + if hasattr(module, "Tools"): + return module.Tools(), frontmatter + else: + raise Exception("No Tools class found") + except Exception as e: + print(f"Error loading module: {toolkit_id}") + # Move the file to the error folder + os.rename(toolkit_path, f"{toolkit_path}.error") + raise e + + +def load_function_module_by_id(function_id): + function_path = os.path.join(FUNCTIONS_DIR, f"{function_id}.py") + + spec = util.spec_from_file_location(function_id, function_path) + module = util.module_from_spec(spec) + frontmatter = extract_frontmatter(function_path) + + try: + spec.loader.exec_module(module) + print(f"Loaded module: {module.__name__}") + if hasattr(module, "Pipe"): + return module.Pipe(), "pipe", frontmatter + elif hasattr(module, "Filter"): + return module.Filter(), "filter", frontmatter + elif hasattr(module, "Action"): + return module.Action(), "action", frontmatter + else: + raise Exception("No Function class found") + except Exception as e: + print(f"Error loading module: {function_id}") + # Move the file to the error folder + os.rename(function_path, f"{function_path}.error") + raise e diff --git a/backend/config.py b/backend/config.py new file mode 100644 index 0000000000000000000000000000000000000000..395720c188817912f7aa4af069c8aae90a22076c --- /dev/null +++ b/backend/config.py @@ -0,0 +1,1382 @@ +import os +import sys +import logging +import importlib.metadata +import pkgutil +import chromadb +from chromadb import Settings +from bs4 import BeautifulSoup +from typing import TypeVar, Generic +from pydantic import BaseModel +from typing import Optional + +from pathlib import Path +import json +import yaml + +import markdown +import requests +import shutil + +from constants import ERROR_MESSAGES + +#################################### +# Load .env file +#################################### + +BACKEND_DIR = Path(__file__).parent # the path containing this file +BASE_DIR = BACKEND_DIR.parent # the path containing the backend/ + +print(BASE_DIR) + +try: + from dotenv import load_dotenv, find_dotenv + + load_dotenv(find_dotenv(str(BASE_DIR / ".env"))) +except ImportError: + print("dotenv not installed, skipping...") + + +#################################### +# LOGGING 
+#################################### + +log_levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] + +GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper() +if GLOBAL_LOG_LEVEL in log_levels: + logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True) +else: + GLOBAL_LOG_LEVEL = "INFO" + +log = logging.getLogger(__name__) +log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}") + +log_sources = [ + "AUDIO", + "COMFYUI", + "CONFIG", + "DB", + "IMAGES", + "MAIN", + "MODELS", + "OLLAMA", + "OPENAI", + "RAG", + "WEBHOOK", +] + +SRC_LOG_LEVELS = {} + +for source in log_sources: + log_env_var = source + "_LOG_LEVEL" + SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper() + if SRC_LOG_LEVELS[source] not in log_levels: + SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL + log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}") + +log.setLevel(SRC_LOG_LEVELS["CONFIG"]) + + +class EndpointFilter(logging.Filter): + def filter(self, record: logging.LogRecord) -> bool: + return record.getMessage().find("/health") == -1 + + +# Filter out /endpoint +logging.getLogger("uvicorn.access").addFilter(EndpointFilter()) + + +WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI") +if WEBUI_NAME != "Open WebUI": + WEBUI_NAME += " (Open WebUI)" + +WEBUI_URL = os.environ.get("WEBUI_URL", "http://localhost:3000") + +WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png" + + +#################################### +# ENV (dev,test,prod) +#################################### + +ENV = os.environ.get("ENV", "dev") + +try: + PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text()) +except: + try: + PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")} + except importlib.metadata.PackageNotFoundError: + PACKAGE_DATA = {"version": "0.0.0"} + +VERSION = PACKAGE_DATA["version"] + + +# Function to parse each section +def parse_section(section): + items = [] + for li in section.find_all("li"): + # Extract raw HTML string + raw_html = str(li) + + # Extract text without HTML tags + text = li.get_text(separator=" ", strip=True) + + # Split into title and content + parts = text.split(": ", 1) + title = parts[0].strip() if len(parts) > 1 else "" + content = parts[1].strip() if len(parts) > 1 else text + + items.append({"title": title, "content": content, "raw": raw_html}) + return items + + +try: + changelog_path = BASE_DIR / "CHANGELOG.md" + with open(str(changelog_path.absolute()), "r", encoding="utf8") as file: + changelog_content = file.read() + +except: + changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode() + + +# Convert markdown content to HTML +html_content = markdown.markdown(changelog_content) + +# Parse the HTML content +soup = BeautifulSoup(html_content, "html.parser") + +# Initialize JSON structure +changelog_json = {} + +# Iterate over each version +for version in soup.find_all("h2"): + version_number = version.get_text().strip().split(" - ")[0][1:-1] # Remove brackets + date = version.get_text().strip().split(" - ")[1] + + version_data = {"date": date} + + # Find the next sibling that is a h3 tag (section title) + current = version.find_next_sibling() + + while current and current.name != "h2": + if current.name == "h3": + section_title = current.get_text().lower() # e.g., "added", "fixed" + section_items = parse_section(current.find_next_sibling("ul")) + version_data[section_title] = section_items + + # Move to the next element + current = current.find_next_sibling() + + changelog_json[version_number] = version_data + + +CHANGELOG = 
changelog_json + + +#################################### +# SAFE_MODE +#################################### + +SAFE_MODE = os.environ.get("SAFE_MODE", "false").lower() == "true" + +#################################### +# WEBUI_BUILD_HASH +#################################### + +WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build") + +#################################### +# DATA/FRONTEND BUILD DIR +#################################### + +DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve() +FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve() + +RESET_CONFIG_ON_START = ( + os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true" +) +if RESET_CONFIG_ON_START: + try: + os.remove(f"{DATA_DIR}/config.json") + with open(f"{DATA_DIR}/config.json", "w") as f: + f.write("{}") + except: + pass + +try: + CONFIG_DATA = json.loads((DATA_DIR / "config.json").read_text()) +except: + CONFIG_DATA = {} + + +#################################### +# Config helpers +#################################### + + +def save_config(): + try: + with open(f"{DATA_DIR}/config.json", "w") as f: + json.dump(CONFIG_DATA, f, indent="\t") + except Exception as e: + log.exception(e) + + +def get_config_value(config_path: str): + path_parts = config_path.split(".") + cur_config = CONFIG_DATA + for key in path_parts: + if key in cur_config: + cur_config = cur_config[key] + else: + return None + return cur_config + + +T = TypeVar("T") + + +class PersistentConfig(Generic[T]): + def __init__(self, env_name: str, config_path: str, env_value: T): + self.env_name = env_name + self.config_path = config_path + self.env_value = env_value + self.config_value = get_config_value(config_path) + if self.config_value is not None: + log.info(f"'{env_name}' loaded from config.json") + self.value = self.config_value + else: + self.value = env_value + + def __str__(self): + return str(self.value) + + @property + def __dict__(self): + raise TypeError( + "PersistentConfig object cannot be converted to dict, use config_get or .value instead." + ) + + def __getattribute__(self, item): + if item == "__dict__": + raise TypeError( + "PersistentConfig object cannot be converted to dict, use config_get or .value instead." 
+ ) + return super().__getattribute__(item) + + def save(self): + # Don't save if the value is the same as the env value and the config value + if self.env_value == self.value: + if self.config_value == self.value: + return + log.info(f"Saving '{self.env_name}' to config.json") + path_parts = self.config_path.split(".") + config = CONFIG_DATA + for key in path_parts[:-1]: + if key not in config: + config[key] = {} + config = config[key] + config[path_parts[-1]] = self.value + save_config() + self.config_value = self.value + + +class AppConfig: + _state: dict[str, PersistentConfig] + + def __init__(self): + super().__setattr__("_state", {}) + + def __setattr__(self, key, value): + if isinstance(value, PersistentConfig): + self._state[key] = value + else: + self._state[key].value = value + self._state[key].save() + + def __getattr__(self, key): + return self._state[key].value + + +#################################### +# WEBUI_AUTH (Required for security) +#################################### + +WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true" +WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get( + "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None +) +WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get("WEBUI_AUTH_TRUSTED_NAME_HEADER", None) +JWT_EXPIRES_IN = PersistentConfig( + "JWT_EXPIRES_IN", "auth.jwt_expiry", os.environ.get("JWT_EXPIRES_IN", "-1") +) + +#################################### +# OAuth config +#################################### + +ENABLE_OAUTH_SIGNUP = PersistentConfig( + "ENABLE_OAUTH_SIGNUP", + "oauth.enable_signup", + os.environ.get("ENABLE_OAUTH_SIGNUP", "False").lower() == "true", +) + +OAUTH_MERGE_ACCOUNTS_BY_EMAIL = PersistentConfig( + "OAUTH_MERGE_ACCOUNTS_BY_EMAIL", + "oauth.merge_accounts_by_email", + os.environ.get("OAUTH_MERGE_ACCOUNTS_BY_EMAIL", "False").lower() == "true", +) + +OAUTH_PROVIDERS = {} + +GOOGLE_CLIENT_ID = PersistentConfig( + "GOOGLE_CLIENT_ID", + "oauth.google.client_id", + os.environ.get("GOOGLE_CLIENT_ID", ""), +) + +GOOGLE_CLIENT_SECRET = PersistentConfig( + "GOOGLE_CLIENT_SECRET", + "oauth.google.client_secret", + os.environ.get("GOOGLE_CLIENT_SECRET", ""), +) + +GOOGLE_OAUTH_SCOPE = PersistentConfig( + "GOOGLE_OAUTH_SCOPE", + "oauth.google.scope", + os.environ.get("GOOGLE_OAUTH_SCOPE", "openid email profile"), +) + +MICROSOFT_CLIENT_ID = PersistentConfig( + "MICROSOFT_CLIENT_ID", + "oauth.microsoft.client_id", + os.environ.get("MICROSOFT_CLIENT_ID", ""), +) + +MICROSOFT_CLIENT_SECRET = PersistentConfig( + "MICROSOFT_CLIENT_SECRET", + "oauth.microsoft.client_secret", + os.environ.get("MICROSOFT_CLIENT_SECRET", ""), +) + +MICROSOFT_CLIENT_TENANT_ID = PersistentConfig( + "MICROSOFT_CLIENT_TENANT_ID", + "oauth.microsoft.tenant_id", + os.environ.get("MICROSOFT_CLIENT_TENANT_ID", ""), +) + +MICROSOFT_OAUTH_SCOPE = PersistentConfig( + "MICROSOFT_OAUTH_SCOPE", + "oauth.microsoft.scope", + os.environ.get("MICROSOFT_OAUTH_SCOPE", "openid email profile"), +) + +OAUTH_CLIENT_ID = PersistentConfig( + "OAUTH_CLIENT_ID", + "oauth.oidc.client_id", + os.environ.get("OAUTH_CLIENT_ID", ""), +) + +OAUTH_CLIENT_SECRET = PersistentConfig( + "OAUTH_CLIENT_SECRET", + "oauth.oidc.client_secret", + os.environ.get("OAUTH_CLIENT_SECRET", ""), +) + +OPENID_PROVIDER_URL = PersistentConfig( + "OPENID_PROVIDER_URL", + "oauth.oidc.provider_url", + os.environ.get("OPENID_PROVIDER_URL", ""), +) + +OAUTH_SCOPES = PersistentConfig( + "OAUTH_SCOPES", + "oauth.oidc.scopes", + os.environ.get("OAUTH_SCOPES", "openid email profile"), +) + +OAUTH_PROVIDER_NAME = 
PersistentConfig( + "OAUTH_PROVIDER_NAME", + "oauth.oidc.provider_name", + os.environ.get("OAUTH_PROVIDER_NAME", "SSO"), +) + +OAUTH_USERNAME_CLAIM = PersistentConfig( + "OAUTH_USERNAME_CLAIM", + "oauth.oidc.username_claim", + os.environ.get("OAUTH_USERNAME_CLAIM", "name"), +) + +OAUTH_PICTURE_CLAIM = PersistentConfig( + "OAUTH_USERNAME_CLAIM", + "oauth.oidc.avatar_claim", + os.environ.get("OAUTH_PICTURE_CLAIM", "picture"), +) + + +def load_oauth_providers(): + OAUTH_PROVIDERS.clear() + if GOOGLE_CLIENT_ID.value and GOOGLE_CLIENT_SECRET.value: + OAUTH_PROVIDERS["google"] = { + "client_id": GOOGLE_CLIENT_ID.value, + "client_secret": GOOGLE_CLIENT_SECRET.value, + "server_metadata_url": "https://accounts.google.com/.well-known/openid-configuration", + "scope": GOOGLE_OAUTH_SCOPE.value, + } + + if ( + MICROSOFT_CLIENT_ID.value + and MICROSOFT_CLIENT_SECRET.value + and MICROSOFT_CLIENT_TENANT_ID.value + ): + OAUTH_PROVIDERS["microsoft"] = { + "client_id": MICROSOFT_CLIENT_ID.value, + "client_secret": MICROSOFT_CLIENT_SECRET.value, + "server_metadata_url": f"https://login.microsoftonline.com/{MICROSOFT_CLIENT_TENANT_ID.value}/v2.0/.well-known/openid-configuration", + "scope": MICROSOFT_OAUTH_SCOPE.value, + } + + if ( + OAUTH_CLIENT_ID.value + and OAUTH_CLIENT_SECRET.value + and OPENID_PROVIDER_URL.value + ): + OAUTH_PROVIDERS["oidc"] = { + "client_id": OAUTH_CLIENT_ID.value, + "client_secret": OAUTH_CLIENT_SECRET.value, + "server_metadata_url": OPENID_PROVIDER_URL.value, + "scope": OAUTH_SCOPES.value, + "name": OAUTH_PROVIDER_NAME.value, + } + + +load_oauth_providers() + +#################################### +# Static DIR +#################################### + +STATIC_DIR = Path(os.getenv("STATIC_DIR", BACKEND_DIR / "static")).resolve() + +frontend_favicon = FRONTEND_BUILD_DIR / "static" / "favicon.png" + +if frontend_favicon.exists(): + try: + shutil.copyfile(frontend_favicon, STATIC_DIR / "favicon.png") + except Exception as e: + logging.error(f"An error occurred: {e}") +else: + logging.warning(f"Frontend favicon not found at {frontend_favicon}") + +frontend_splash = FRONTEND_BUILD_DIR / "static" / "splash.png" + +if frontend_splash.exists(): + try: + shutil.copyfile(frontend_splash, STATIC_DIR / "splash.png") + except Exception as e: + logging.error(f"An error occurred: {e}") +else: + logging.warning(f"Frontend splash not found at {frontend_splash}") + + +#################################### +# CUSTOM_NAME +#################################### + +CUSTOM_NAME = os.environ.get("CUSTOM_NAME", "") + +if CUSTOM_NAME: + try: + r = requests.get(f"https://api.openwebui.com/api/v1/custom/{CUSTOM_NAME}") + data = r.json() + if r.ok: + if "logo" in data: + WEBUI_FAVICON_URL = url = ( + f"https://api.openwebui.com{data['logo']}" + if data["logo"][0] == "/" + else data["logo"] + ) + + r = requests.get(url, stream=True) + if r.status_code == 200: + with open(f"{STATIC_DIR}/favicon.png", "wb") as f: + r.raw.decode_content = True + shutil.copyfileobj(r.raw, f) + + if "splash" in data: + url = ( + f"https://api.openwebui.com{data['splash']}" + if data["splash"][0] == "/" + else data["splash"] + ) + + r = requests.get(url, stream=True) + if r.status_code == 200: + with open(f"{STATIC_DIR}/splash.png", "wb") as f: + r.raw.decode_content = True + shutil.copyfileobj(r.raw, f) + + WEBUI_NAME = data["name"] + except Exception as e: + log.exception(e) + pass + + +#################################### +# File Upload DIR +#################################### + +UPLOAD_DIR = f"{DATA_DIR}/uploads" 
+Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True) + + +#################################### +# Cache DIR +#################################### + +CACHE_DIR = f"{DATA_DIR}/cache" +Path(CACHE_DIR).mkdir(parents=True, exist_ok=True) + + +#################################### +# Docs DIR +#################################### + +DOCS_DIR = os.getenv("DOCS_DIR", f"{DATA_DIR}/docs") +Path(DOCS_DIR).mkdir(parents=True, exist_ok=True) + + +#################################### +# Tools DIR +#################################### + +TOOLS_DIR = os.getenv("TOOLS_DIR", f"{DATA_DIR}/tools") +Path(TOOLS_DIR).mkdir(parents=True, exist_ok=True) + + +#################################### +# Functions DIR +#################################### + +FUNCTIONS_DIR = os.getenv("FUNCTIONS_DIR", f"{DATA_DIR}/functions") +Path(FUNCTIONS_DIR).mkdir(parents=True, exist_ok=True) + + +#################################### +# LITELLM_CONFIG +#################################### + + +def create_config_file(file_path): + directory = os.path.dirname(file_path) + + # Check if directory exists, if not, create it + if not os.path.exists(directory): + os.makedirs(directory) + + # Data to write into the YAML file + config_data = { + "general_settings": {}, + "litellm_settings": {}, + "model_list": [], + "router_settings": {}, + } + + # Write data to YAML file + with open(file_path, "w") as file: + yaml.dump(config_data, file) + + +LITELLM_CONFIG_PATH = f"{DATA_DIR}/litellm/config.yaml" + +# if not os.path.exists(LITELLM_CONFIG_PATH): +# log.info("Config file doesn't exist. Creating...") +# create_config_file(LITELLM_CONFIG_PATH) +# log.info("Config file created successfully.") + + +#################################### +# OLLAMA_BASE_URL +#################################### + + +ENABLE_OLLAMA_API = PersistentConfig( + "ENABLE_OLLAMA_API", + "ollama.enable", + os.environ.get("ENABLE_OLLAMA_API", "True").lower() == "true", +) + +OLLAMA_API_BASE_URL = os.environ.get( + "OLLAMA_API_BASE_URL", "http://localhost:11434/api" +) + +OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "") +AIOHTTP_CLIENT_TIMEOUT = os.environ.get("AIOHTTP_CLIENT_TIMEOUT", "") + +if AIOHTTP_CLIENT_TIMEOUT == "": + AIOHTTP_CLIENT_TIMEOUT = None +else: + try: + AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT) + except: + AIOHTTP_CLIENT_TIMEOUT = 300 + + +K8S_FLAG = os.environ.get("K8S_FLAG", "") +USE_OLLAMA_DOCKER = os.environ.get("USE_OLLAMA_DOCKER", "false") + +if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "": + OLLAMA_BASE_URL = ( + OLLAMA_API_BASE_URL[:-4] + if OLLAMA_API_BASE_URL.endswith("/api") + else OLLAMA_API_BASE_URL + ) + +if ENV == "prod": + if OLLAMA_BASE_URL == "/ollama" and not K8S_FLAG: + if USE_OLLAMA_DOCKER.lower() == "true": + # if you use all-in-one docker container (Open WebUI + Ollama) + # with the docker build arg USE_OLLAMA=true (--build-arg="USE_OLLAMA=true") this only works with http://localhost:11434 + OLLAMA_BASE_URL = "http://localhost:11434" + else: + OLLAMA_BASE_URL = "http://host.docker.internal:11434" + elif K8S_FLAG: + OLLAMA_BASE_URL = "http://ollama-service.open-webui.svc.cluster.local:11434" + + +OLLAMA_BASE_URLS = os.environ.get("OLLAMA_BASE_URLS", "") +OLLAMA_BASE_URLS = OLLAMA_BASE_URLS if OLLAMA_BASE_URLS != "" else OLLAMA_BASE_URL + +OLLAMA_BASE_URLS = [url.strip() for url in OLLAMA_BASE_URLS.split(";")] +OLLAMA_BASE_URLS = PersistentConfig( + "OLLAMA_BASE_URLS", "ollama.base_urls", OLLAMA_BASE_URLS +) + +#################################### +# OPENAI_API +#################################### + + 
+ENABLE_OPENAI_API = PersistentConfig( + "ENABLE_OPENAI_API", + "openai.enable", + os.environ.get("ENABLE_OPENAI_API", "True").lower() == "true", +) + + +OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "") +OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "") + + +if OPENAI_API_BASE_URL == "": + OPENAI_API_BASE_URL = "https://api.openai.com/v1" + +OPENAI_API_KEYS = os.environ.get("OPENAI_API_KEYS", "") +OPENAI_API_KEYS = OPENAI_API_KEYS if OPENAI_API_KEYS != "" else OPENAI_API_KEY + +OPENAI_API_KEYS = [url.strip() for url in OPENAI_API_KEYS.split(";")] +OPENAI_API_KEYS = PersistentConfig( + "OPENAI_API_KEYS", "openai.api_keys", OPENAI_API_KEYS +) + +OPENAI_API_BASE_URLS = os.environ.get("OPENAI_API_BASE_URLS", "") +OPENAI_API_BASE_URLS = ( + OPENAI_API_BASE_URLS if OPENAI_API_BASE_URLS != "" else OPENAI_API_BASE_URL +) + +OPENAI_API_BASE_URLS = [ + url.strip() if url != "" else "https://api.openai.com/v1" + for url in OPENAI_API_BASE_URLS.split(";") +] +OPENAI_API_BASE_URLS = PersistentConfig( + "OPENAI_API_BASE_URLS", "openai.api_base_urls", OPENAI_API_BASE_URLS +) + +OPENAI_API_KEY = "" + +try: + OPENAI_API_KEY = OPENAI_API_KEYS.value[ + OPENAI_API_BASE_URLS.value.index("https://api.openai.com/v1") + ] +except: + pass + +OPENAI_API_BASE_URL = "https://api.openai.com/v1" + +#################################### +# WEBUI +#################################### + +ENABLE_SIGNUP = PersistentConfig( + "ENABLE_SIGNUP", + "ui.enable_signup", + ( + False + if not WEBUI_AUTH + else os.environ.get("ENABLE_SIGNUP", "True").lower() == "true" + ), +) + +ENABLE_LOGIN_FORM = PersistentConfig( + "ENABLE_LOGIN_FORM", + "ui.ENABLE_LOGIN_FORM", + os.environ.get("ENABLE_LOGIN_FORM", "True").lower() == "true", +) + +DEFAULT_LOCALE = PersistentConfig( + "DEFAULT_LOCALE", + "ui.default_locale", + os.environ.get("DEFAULT_LOCALE", ""), +) + +DEFAULT_MODELS = PersistentConfig( + "DEFAULT_MODELS", "ui.default_models", os.environ.get("DEFAULT_MODELS", None) +) + +DEFAULT_PROMPT_SUGGESTIONS = PersistentConfig( + "DEFAULT_PROMPT_SUGGESTIONS", + "ui.prompt_suggestions", + [ + { + "title": ["Help me study", "vocabulary for a college entrance exam"], + "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.", + }, + { + "title": ["Give me ideas", "for what to do with my kids' art"], + "content": "What are 5 creative things I could do with my kids' art? 
I don't want to throw them away, but it's also so much clutter.", + }, + { + "title": ["Tell me a fun fact", "about the Roman Empire"], + "content": "Tell me a random fun fact about the Roman Empire", + }, + { + "title": ["Show me a code snippet", "of a website's sticky header"], + "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript.", + }, + { + "title": [ + "Explain options trading", + "if I'm familiar with buying and selling stocks", + ], + "content": "Explain options trading in simple terms if I'm familiar with buying and selling stocks.", + }, + { + "title": ["Overcome procrastination", "give me tips"], + "content": "Could you start by asking me about instances when I procrastinate the most and then give me some suggestions to overcome it?", + }, + ], +) + +DEFAULT_USER_ROLE = PersistentConfig( + "DEFAULT_USER_ROLE", + "ui.default_user_role", + os.getenv("DEFAULT_USER_ROLE", "pending"), +) + +USER_PERMISSIONS_CHAT_DELETION = ( + os.environ.get("USER_PERMISSIONS_CHAT_DELETION", "True").lower() == "true" +) + +USER_PERMISSIONS = PersistentConfig( + "USER_PERMISSIONS", + "ui.user_permissions", + {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}}, +) + +ENABLE_MODEL_FILTER = PersistentConfig( + "ENABLE_MODEL_FILTER", + "model_filter.enable", + os.environ.get("ENABLE_MODEL_FILTER", "False").lower() == "true", +) +MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "") +MODEL_FILTER_LIST = PersistentConfig( + "MODEL_FILTER_LIST", + "model_filter.list", + [model.strip() for model in MODEL_FILTER_LIST.split(";")], +) + +WEBHOOK_URL = PersistentConfig( + "WEBHOOK_URL", "webhook_url", os.environ.get("WEBHOOK_URL", "") +) + +ENABLE_ADMIN_EXPORT = os.environ.get("ENABLE_ADMIN_EXPORT", "True").lower() == "true" + +ENABLE_COMMUNITY_SHARING = PersistentConfig( + "ENABLE_COMMUNITY_SHARING", + "ui.enable_community_sharing", + os.environ.get("ENABLE_COMMUNITY_SHARING", "True").lower() == "true", +) + + +class BannerModel(BaseModel): + id: str + type: str + title: Optional[str] = None + content: str + dismissible: bool + timestamp: int + + +try: + banners = json.loads(os.environ.get("WEBUI_BANNERS", "[]")) + banners = [BannerModel(**banner) for banner in banners] +except Exception as e: + print(f"Error loading WEBUI_BANNERS: {e}") + banners = [] + +WEBUI_BANNERS = PersistentConfig("WEBUI_BANNERS", "ui.banners", banners) + + +SHOW_ADMIN_DETAILS = PersistentConfig( + "SHOW_ADMIN_DETAILS", + "auth.admin.show", + os.environ.get("SHOW_ADMIN_DETAILS", "true").lower() == "true", +) + +ADMIN_EMAIL = PersistentConfig( + "ADMIN_EMAIL", + "auth.admin.email", + os.environ.get("ADMIN_EMAIL", None), +) + + +#################################### +# TASKS +#################################### + + +TASK_MODEL = PersistentConfig( + "TASK_MODEL", + "task.model.default", + os.environ.get("TASK_MODEL", ""), +) + +TASK_MODEL_EXTERNAL = PersistentConfig( + "TASK_MODEL_EXTERNAL", + "task.model.external", + os.environ.get("TASK_MODEL_EXTERNAL", ""), +) + +TITLE_GENERATION_PROMPT_TEMPLATE = PersistentConfig( + "TITLE_GENERATION_PROMPT_TEMPLATE", + "task.title.prompt_template", + os.environ.get( + "TITLE_GENERATION_PROMPT_TEMPLATE", + """Here is the query: +{{prompt:middletruncate:8000}} + +Create a concise, 3-5 word phrase with an emoji as a title for the previous query. Suitable Emojis for the summary can be used to enhance understanding but avoid quotation marks or special formatting. RESPOND ONLY WITH THE TITLE TEXT. 
+ +Examples of titles: +📉 Stock Market Trends +🍪 Perfect Chocolate Chip Recipe +Evolution of Music Streaming +Remote Work Productivity Tips +Artificial Intelligence in Healthcare +🎮 Video Game Development Insights""", + ), +) + + +SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = PersistentConfig( + "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE", + "task.search.prompt_template", + os.environ.get( + "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE", + """You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is {{CURRENT_DATE}}. + +Question: +{{prompt:end:4000}}""", + ), +) + +SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = PersistentConfig( + "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD", + "task.search.prompt_length_threshold", + int( + os.environ.get( + "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD", + 100, + ) + ), +) + +TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = PersistentConfig( + "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE", + "task.tools.prompt_template", + os.environ.get( + "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE", + """Tools: {{TOOLS}} +If a function tool doesn't match the query, return an empty string. Else, pick a function tool, fill in the parameters from the function tool's schema, and return it in the format { "name": \"functionName\", "parameters": { "key": "value" } }. Only pick a function if the user asks. Only return the object. Do not return any other text.""", + ), +) + + +#################################### +# WEBUI_SECRET_KEY +#################################### + +WEBUI_SECRET_KEY = os.environ.get( + "WEBUI_SECRET_KEY", + os.environ.get( + "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t" + ), # DEPRECATED: remove at next major version +) + +WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get( + "WEBUI_SESSION_COOKIE_SAME_SITE", + os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax"), +) + +WEBUI_SESSION_COOKIE_SECURE = os.environ.get( + "WEBUI_SESSION_COOKIE_SECURE", + os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true", +) + +if WEBUI_AUTH and WEBUI_SECRET_KEY == "": + raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND) + +#################################### +# RAG document content extraction +#################################### + +CONTENT_EXTRACTION_ENGINE = PersistentConfig( + "CONTENT_EXTRACTION_ENGINE", + "rag.CONTENT_EXTRACTION_ENGINE", + os.environ.get("CONTENT_EXTRACTION_ENGINE", "").lower(), +) + +TIKA_SERVER_URL = PersistentConfig( + "TIKA_SERVER_URL", + "rag.tika_server_url", + os.getenv("TIKA_SERVER_URL", "http://tika:9998"), # Default for sidecar deployment +) + +#################################### +# RAG +#################################### + +CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db" +CHROMA_TENANT = os.environ.get("CHROMA_TENANT", chromadb.DEFAULT_TENANT) +CHROMA_DATABASE = os.environ.get("CHROMA_DATABASE", chromadb.DEFAULT_DATABASE) +CHROMA_HTTP_HOST = os.environ.get("CHROMA_HTTP_HOST", "") +CHROMA_HTTP_PORT = int(os.environ.get("CHROMA_HTTP_PORT", "8000")) +# Comma-separated list of header=value pairs +CHROMA_HTTP_HEADERS = os.environ.get("CHROMA_HTTP_HEADERS", "") +if CHROMA_HTTP_HEADERS: + CHROMA_HTTP_HEADERS = dict( + [pair.split("=") for pair in CHROMA_HTTP_HEADERS.split(",")] + ) +else: + CHROMA_HTTP_HEADERS = None +CHROMA_HTTP_SSL = os.environ.get("CHROMA_HTTP_SSL", "false").lower() == "true" +# this uses the model defined in the Dockerfile ENV variable. 
If you don't use Docker or a Docker-based deployment such as k8s, the default embedding model will be used (sentence-transformers/all-MiniLM-L6-v2)
+
+RAG_TOP_K = PersistentConfig(
+    "RAG_TOP_K", "rag.top_k", int(os.environ.get("RAG_TOP_K", "5"))
+)
+RAG_RELEVANCE_THRESHOLD = PersistentConfig(
+    "RAG_RELEVANCE_THRESHOLD",
+    "rag.relevance_threshold",
+    float(os.environ.get("RAG_RELEVANCE_THRESHOLD", "0.0")),
+)
+
+ENABLE_RAG_HYBRID_SEARCH = PersistentConfig(
+    "ENABLE_RAG_HYBRID_SEARCH",
+    "rag.enable_hybrid_search",
+    os.environ.get("ENABLE_RAG_HYBRID_SEARCH", "").lower() == "true",
+)
+
+ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = PersistentConfig(
+    "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION",
+    "rag.enable_web_loader_ssl_verification",
+    os.environ.get("ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION", "True").lower() == "true",
+)
+
+RAG_EMBEDDING_ENGINE = PersistentConfig(
+    "RAG_EMBEDDING_ENGINE",
+    "rag.embedding_engine",
+    os.environ.get("RAG_EMBEDDING_ENGINE", ""),
+)
+
+PDF_EXTRACT_IMAGES = PersistentConfig(
+    "PDF_EXTRACT_IMAGES",
+    "rag.pdf_extract_images",
+    os.environ.get("PDF_EXTRACT_IMAGES", "False").lower() == "true",
+)
+
+RAG_EMBEDDING_MODEL = PersistentConfig(
+    "RAG_EMBEDDING_MODEL",
+    "rag.embedding_model",
+    os.environ.get("RAG_EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"),
+)
+log.info(f"Embedding model set: {RAG_EMBEDDING_MODEL.value}")
+
+RAG_EMBEDDING_MODEL_AUTO_UPDATE = (
+    os.environ.get("RAG_EMBEDDING_MODEL_AUTO_UPDATE", "").lower() == "true"
+)
+
+RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE = (
+    os.environ.get("RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
+)
+
+RAG_EMBEDDING_OPENAI_BATCH_SIZE = PersistentConfig(
+    "RAG_EMBEDDING_OPENAI_BATCH_SIZE",
+    "rag.embedding_openai_batch_size",
+    int(os.environ.get("RAG_EMBEDDING_OPENAI_BATCH_SIZE", "1")),
+)
+
+RAG_RERANKING_MODEL = PersistentConfig(
+    "RAG_RERANKING_MODEL",
+    "rag.reranking_model",
+    os.environ.get("RAG_RERANKING_MODEL", ""),
+)
+if RAG_RERANKING_MODEL.value != "":
+    log.info(f"Reranking model set: {RAG_RERANKING_MODEL.value}")
+
+RAG_RERANKING_MODEL_AUTO_UPDATE = (
+    os.environ.get("RAG_RERANKING_MODEL_AUTO_UPDATE", "").lower() == "true"
+)
+
+RAG_RERANKING_MODEL_TRUST_REMOTE_CODE = (
+    os.environ.get("RAG_RERANKING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
+)
+
+
+if CHROMA_HTTP_HOST != "":
+    CHROMA_CLIENT = chromadb.HttpClient(
+        host=CHROMA_HTTP_HOST,
+        port=CHROMA_HTTP_PORT,
+        headers=CHROMA_HTTP_HEADERS,
+        ssl=CHROMA_HTTP_SSL,
+        tenant=CHROMA_TENANT,
+        database=CHROMA_DATABASE,
+        settings=Settings(allow_reset=True, anonymized_telemetry=False),
+    )
+else:
+    CHROMA_CLIENT = chromadb.PersistentClient(
+        path=CHROMA_DATA_PATH,
+        settings=Settings(allow_reset=True, anonymized_telemetry=False),
+        tenant=CHROMA_TENANT,
+        database=CHROMA_DATABASE,
+    )
+
+
+# Device type for embedding models - "cpu" (default), "cuda" (NVIDIA GPU required) or "mps" (Apple Silicon); choosing the right one can improve performance
+USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
+
+if USE_CUDA.lower() == "true":
+    DEVICE_TYPE = "cuda"
+else:
+    DEVICE_TYPE = "cpu"
+
+CHUNK_SIZE = PersistentConfig(
+    "CHUNK_SIZE", "rag.chunk_size", int(os.environ.get("CHUNK_SIZE", "1500"))
+)
+CHUNK_OVERLAP = PersistentConfig(
+    "CHUNK_OVERLAP",
+    "rag.chunk_overlap",
+    int(os.environ.get("CHUNK_OVERLAP", "100")),
+)
+
+DEFAULT_RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
+<context>
+    [context]
+</context>
+
+When answering the user:
+- If you don't know, just say that you don't know. 
+- If you don't know when you are not sure, ask for clarification. +Avoid mentioning that you obtained the information from the context. +And answer according to the language of the user's question. + +Given the context information, answer the query. +Query: [query]""" + +RAG_TEMPLATE = PersistentConfig( + "RAG_TEMPLATE", + "rag.template", + os.environ.get("RAG_TEMPLATE", DEFAULT_RAG_TEMPLATE), +) + +RAG_OPENAI_API_BASE_URL = PersistentConfig( + "RAG_OPENAI_API_BASE_URL", + "rag.openai_api_base_url", + os.getenv("RAG_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL), +) +RAG_OPENAI_API_KEY = PersistentConfig( + "RAG_OPENAI_API_KEY", + "rag.openai_api_key", + os.getenv("RAG_OPENAI_API_KEY", OPENAI_API_KEY), +) + +ENABLE_RAG_LOCAL_WEB_FETCH = ( + os.getenv("ENABLE_RAG_LOCAL_WEB_FETCH", "False").lower() == "true" +) + +YOUTUBE_LOADER_LANGUAGE = PersistentConfig( + "YOUTUBE_LOADER_LANGUAGE", + "rag.youtube_loader_language", + os.getenv("YOUTUBE_LOADER_LANGUAGE", "en").split(","), +) + + +ENABLE_RAG_WEB_SEARCH = PersistentConfig( + "ENABLE_RAG_WEB_SEARCH", + "rag.web.search.enable", + os.getenv("ENABLE_RAG_WEB_SEARCH", "False").lower() == "true", +) + +RAG_WEB_SEARCH_ENGINE = PersistentConfig( + "RAG_WEB_SEARCH_ENGINE", + "rag.web.search.engine", + os.getenv("RAG_WEB_SEARCH_ENGINE", ""), +) + +# You can provide a list of your own websites to filter after performing a web search. +# This ensures the highest level of safety and reliability of the information sources. +RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = PersistentConfig( + "RAG_WEB_SEARCH_DOMAIN_FILTER_LIST", + "rag.rag.web.search.domain.filter_list", + [ + # "wikipedia.com", + # "wikimedia.org", + # "wikidata.org", + ], +) + +SEARXNG_QUERY_URL = PersistentConfig( + "SEARXNG_QUERY_URL", + "rag.web.search.searxng_query_url", + os.getenv("SEARXNG_QUERY_URL", ""), +) + +GOOGLE_PSE_API_KEY = PersistentConfig( + "GOOGLE_PSE_API_KEY", + "rag.web.search.google_pse_api_key", + os.getenv("GOOGLE_PSE_API_KEY", ""), +) + +GOOGLE_PSE_ENGINE_ID = PersistentConfig( + "GOOGLE_PSE_ENGINE_ID", + "rag.web.search.google_pse_engine_id", + os.getenv("GOOGLE_PSE_ENGINE_ID", ""), +) + +BRAVE_SEARCH_API_KEY = PersistentConfig( + "BRAVE_SEARCH_API_KEY", + "rag.web.search.brave_search_api_key", + os.getenv("BRAVE_SEARCH_API_KEY", ""), +) + +SERPSTACK_API_KEY = PersistentConfig( + "SERPSTACK_API_KEY", + "rag.web.search.serpstack_api_key", + os.getenv("SERPSTACK_API_KEY", ""), +) + +SERPSTACK_HTTPS = PersistentConfig( + "SERPSTACK_HTTPS", + "rag.web.search.serpstack_https", + os.getenv("SERPSTACK_HTTPS", "True").lower() == "true", +) + +SERPER_API_KEY = PersistentConfig( + "SERPER_API_KEY", + "rag.web.search.serper_api_key", + os.getenv("SERPER_API_KEY", ""), +) + +SERPLY_API_KEY = PersistentConfig( + "SERPLY_API_KEY", + "rag.web.search.serply_api_key", + os.getenv("SERPLY_API_KEY", ""), +) + +TAVILY_API_KEY = PersistentConfig( + "TAVILY_API_KEY", + "rag.web.search.tavily_api_key", + os.getenv("TAVILY_API_KEY", ""), +) + +RAG_WEB_SEARCH_RESULT_COUNT = PersistentConfig( + "RAG_WEB_SEARCH_RESULT_COUNT", + "rag.web.search.result_count", + int(os.getenv("RAG_WEB_SEARCH_RESULT_COUNT", "3")), +) + +RAG_WEB_SEARCH_CONCURRENT_REQUESTS = PersistentConfig( + "RAG_WEB_SEARCH_CONCURRENT_REQUESTS", + "rag.web.search.concurrent_requests", + int(os.getenv("RAG_WEB_SEARCH_CONCURRENT_REQUESTS", "10")), +) + + +#################################### +# Transcribe +#################################### + +WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base") +WHISPER_MODEL_DIR = 
os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models") +WHISPER_MODEL_AUTO_UPDATE = ( + os.environ.get("WHISPER_MODEL_AUTO_UPDATE", "").lower() == "true" +) + + +#################################### +# Images +#################################### + +IMAGE_GENERATION_ENGINE = PersistentConfig( + "IMAGE_GENERATION_ENGINE", + "image_generation.engine", + os.getenv("IMAGE_GENERATION_ENGINE", ""), +) + +ENABLE_IMAGE_GENERATION = PersistentConfig( + "ENABLE_IMAGE_GENERATION", + "image_generation.enable", + os.environ.get("ENABLE_IMAGE_GENERATION", "").lower() == "true", +) +AUTOMATIC1111_BASE_URL = PersistentConfig( + "AUTOMATIC1111_BASE_URL", + "image_generation.automatic1111.base_url", + os.getenv("AUTOMATIC1111_BASE_URL", ""), +) +AUTOMATIC1111_API_AUTH = PersistentConfig( + "AUTOMATIC1111_API_AUTH", + "image_generation.automatic1111.api_auth", + os.getenv("AUTOMATIC1111_API_AUTH", ""), +) + +COMFYUI_BASE_URL = PersistentConfig( + "COMFYUI_BASE_URL", + "image_generation.comfyui.base_url", + os.getenv("COMFYUI_BASE_URL", ""), +) + +COMFYUI_CFG_SCALE = PersistentConfig( + "COMFYUI_CFG_SCALE", + "image_generation.comfyui.cfg_scale", + os.getenv("COMFYUI_CFG_SCALE", ""), +) + +COMFYUI_SAMPLER = PersistentConfig( + "COMFYUI_SAMPLER", + "image_generation.comfyui.sampler", + os.getenv("COMFYUI_SAMPLER", ""), +) + +COMFYUI_SCHEDULER = PersistentConfig( + "COMFYUI_SCHEDULER", + "image_generation.comfyui.scheduler", + os.getenv("COMFYUI_SCHEDULER", ""), +) + +COMFYUI_SD3 = PersistentConfig( + "COMFYUI_SD3", + "image_generation.comfyui.sd3", + os.environ.get("COMFYUI_SD3", "").lower() == "true", +) + +IMAGES_OPENAI_API_BASE_URL = PersistentConfig( + "IMAGES_OPENAI_API_BASE_URL", + "image_generation.openai.api_base_url", + os.getenv("IMAGES_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL), +) +IMAGES_OPENAI_API_KEY = PersistentConfig( + "IMAGES_OPENAI_API_KEY", + "image_generation.openai.api_key", + os.getenv("IMAGES_OPENAI_API_KEY", OPENAI_API_KEY), +) + +IMAGE_SIZE = PersistentConfig( + "IMAGE_SIZE", "image_generation.size", os.getenv("IMAGE_SIZE", "512x512") +) + +IMAGE_STEPS = PersistentConfig( + "IMAGE_STEPS", "image_generation.steps", int(os.getenv("IMAGE_STEPS", 50)) +) + +IMAGE_GENERATION_MODEL = PersistentConfig( + "IMAGE_GENERATION_MODEL", + "image_generation.model", + os.getenv("IMAGE_GENERATION_MODEL", ""), +) + +#################################### +# Audio +#################################### + +AUDIO_STT_OPENAI_API_BASE_URL = PersistentConfig( + "AUDIO_STT_OPENAI_API_BASE_URL", + "audio.stt.openai.api_base_url", + os.getenv("AUDIO_STT_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL), +) + +AUDIO_STT_OPENAI_API_KEY = PersistentConfig( + "AUDIO_STT_OPENAI_API_KEY", + "audio.stt.openai.api_key", + os.getenv("AUDIO_STT_OPENAI_API_KEY", OPENAI_API_KEY), +) + +AUDIO_STT_ENGINE = PersistentConfig( + "AUDIO_STT_ENGINE", + "audio.stt.engine", + os.getenv("AUDIO_STT_ENGINE", ""), +) + +AUDIO_STT_MODEL = PersistentConfig( + "AUDIO_STT_MODEL", + "audio.stt.model", + os.getenv("AUDIO_STT_MODEL", "whisper-1"), +) + +AUDIO_TTS_OPENAI_API_BASE_URL = PersistentConfig( + "AUDIO_TTS_OPENAI_API_BASE_URL", + "audio.tts.openai.api_base_url", + os.getenv("AUDIO_TTS_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL), +) +AUDIO_TTS_OPENAI_API_KEY = PersistentConfig( + "AUDIO_TTS_OPENAI_API_KEY", + "audio.tts.openai.api_key", + os.getenv("AUDIO_TTS_OPENAI_API_KEY", OPENAI_API_KEY), +) + +AUDIO_TTS_API_KEY = PersistentConfig( + "AUDIO_TTS_API_KEY", + "audio.tts.api_key", + os.getenv("AUDIO_TTS_API_KEY", ""), +) + 
+AUDIO_TTS_ENGINE = PersistentConfig( + "AUDIO_TTS_ENGINE", + "audio.tts.engine", + os.getenv("AUDIO_TTS_ENGINE", ""), +) + + +AUDIO_TTS_MODEL = PersistentConfig( + "AUDIO_TTS_MODEL", + "audio.tts.model", + os.getenv("AUDIO_TTS_MODEL", "tts-1"), +) + +AUDIO_TTS_VOICE = PersistentConfig( + "AUDIO_TTS_VOICE", + "audio.tts.voice", + os.getenv("AUDIO_TTS_VOICE", "alloy"), +) + + +#################################### +# Database +#################################### + +DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db") + +# Replace the postgres:// with postgresql:// +if "postgres://" in DATABASE_URL: + DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://") diff --git a/backend/constants.py b/backend/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..b9c7fc430d42b4fb842b6f6360b71be3a88dd892 --- /dev/null +++ b/backend/constants.py @@ -0,0 +1,102 @@ +from enum import Enum + + +class MESSAGES(str, Enum): + DEFAULT = lambda msg="": f"{msg if msg else ''}" + MODEL_ADDED = lambda model="": f"The model '{model}' has been added successfully." + MODEL_DELETED = ( + lambda model="": f"The model '{model}' has been deleted successfully." + ) + + +class WEBHOOK_MESSAGES(str, Enum): + DEFAULT = lambda msg="": f"{msg if msg else ''}" + USER_SIGNUP = lambda username="": ( + f"New user signed up: {username}" if username else "New user signed up" + ) + + +class ERROR_MESSAGES(str, Enum): + def __str__(self) -> str: + return super().__str__() + + DEFAULT = lambda err="": f"Something went wrong :/\n{err if err else ''}" + ENV_VAR_NOT_FOUND = "Required environment variable not found. Terminating now." + CREATE_USER_ERROR = "Oops! Something went wrong while creating your account. Please try again later. If the issue persists, contact support for assistance." + DELETE_USER_ERROR = "Oops! Something went wrong. We encountered an issue while trying to delete the user. Please give it another shot." + EMAIL_MISMATCH = "Uh-oh! This email does not match the email your provider is registered with. Please check your email and try again." + EMAIL_TAKEN = "Uh-oh! This email is already registered. Sign in with your existing account or choose another email to start anew." + USERNAME_TAKEN = ( + "Uh-oh! This username is already registered. Please choose another username." + ) + COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string." + FILE_EXISTS = "Uh-oh! This file is already registered. Please choose another file." + + ID_TAKEN = "Uh-oh! This id is already registered. Please choose another id string." + MODEL_ID_TAKEN = "Uh-oh! This model id is already registered. Please choose another model id string." + + NAME_TAG_TAKEN = "Uh-oh! This name tag is already registered. Please choose another name tag string." + INVALID_TOKEN = ( + "Your session has expired or the token is invalid. Please sign in again." + ) + INVALID_CRED = "The email or password provided is incorrect. Please check for typos and try logging in again." + INVALID_EMAIL_FORMAT = "The email format you entered is invalid. Please double-check and make sure you're using a valid email address (e.g., yourname@example.com)." + INVALID_PASSWORD = ( + "The password provided is incorrect. Please check for typos and try again." + ) + INVALID_TRUSTED_HEADER = "Your provider has not provided a trusted header. Please contact your administrator for assistance." + + EXISTING_USERS = "You can't turn off authentication because there are existing users. 
If you want to disable WEBUI_AUTH, make sure your web interface doesn't have any existing users and is a fresh installation." + + UNAUTHORIZED = "401 Unauthorized" + ACCESS_PROHIBITED = "You do not have permission to access this resource. Please contact your administrator for assistance." + ACTION_PROHIBITED = ( + "The requested action has been restricted as a security measure." + ) + + FILE_NOT_SENT = "FILE_NOT_SENT" + FILE_NOT_SUPPORTED = "Oops! It seems like the file format you're trying to upload is not supported. Please upload a file with a supported format (e.g., JPG, PNG, PDF, TXT) and try again." + + NOT_FOUND = "We could not find what you're looking for :/" + USER_NOT_FOUND = "We could not find what you're looking for :/" + API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature." + + MALICIOUS = "Unusual activities detected, please try again in a few minutes." + + PANDOC_NOT_INSTALLED = "Pandoc is not installed on the server. Please contact your administrator for assistance." + INCORRECT_FORMAT = ( + lambda err="": f"Invalid format. Please use the correct format{err}" + ) + RATE_LIMIT_EXCEEDED = "API rate limit exceeded" + + MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found" + OPENAI_NOT_FOUND = lambda name="": "OpenAI API was not found" + OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama" + CREATE_API_KEY_ERROR = "Oops! Something went wrong while creating your API key. Please try again later. If the issue persists, contact support for assistance." + + EMPTY_CONTENT = "The content provided is empty. Please ensure that there is text or data present before proceeding." + + DB_NOT_SQLITE = "This feature is only available when running with SQLite databases." + + INVALID_URL = ( + "Oops! The URL you provided is invalid. Please double-check and try again." + ) + + WEB_SEARCH_ERROR = ( + lambda err="": f"{err if err else 'Oops! Something went wrong while searching the web.'}" + ) + + OLLAMA_API_DISABLED = ( + "The Ollama API is disabled. Please enable it to use this feature." + ) + + +class TASKS(str, Enum): + def __str__(self) -> str: + return super().__str__() + + DEFAULT = lambda task="": f"{task if task else 'generation'}" + TITLE_GENERATION = "title_generation" + EMOJI_GENERATION = "emoji_generation" + QUERY_GENERATION = "query_generation" + FUNCTION_CALLING = "function_calling" diff --git a/backend/data/config.json b/backend/data/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c7acde9176d836edc614a8e144098292a673e30 --- /dev/null +++ b/backend/data/config.json @@ -0,0 +1,36 @@ +{ + "version": 0, + "ui": { + "default_locale": "", + "prompt_suggestions": [ + { + "title": ["Help me study", "vocabulary for a college entrance exam"], + "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option." + }, + { + "title": ["Give me ideas", "for what to do with my kids' art"], + "content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter." + }, + { + "title": ["Tell me a fun fact", "about the Roman Empire"], + "content": "Tell me a random fun fact about the Roman Empire" + }, + { + "title": ["Show me a code snippet", "of a website's sticky header"], + "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript." 
+ }, + { + "title": ["Explain options trading", "if I'm familiar with buying and selling stocks"], + "content": "Explain options trading in simple terms if I'm familiar with buying and selling stocks." + }, + { + "title": ["Overcome procrastination", "give me tips"], + "content": "Could you start by asking me about instances when I procrastinate the most and then give me some suggestions to overcome it?" + }, + { + "title": ["Grammar check", "rewrite it for better readability "], + "content": "Check the following sentence for grammar and clarity: \"[sentence]\". Rewrite it for better readability while maintaining its original meaning." + } + ] + } +} diff --git a/backend/data/litellm/config.yaml b/backend/data/litellm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7d9d2b72304cd82b7621891affb2d79a0303cd72 --- /dev/null +++ b/backend/data/litellm/config.yaml @@ -0,0 +1,4 @@ +general_settings: {} +litellm_settings: {} +model_list: [] +router_settings: {} diff --git a/backend/dev.sh b/backend/dev.sh new file mode 100755 index 0000000000000000000000000000000000000000..c66ae4ba95ebf57d53834ed3afbe8b26d77ddac5 --- /dev/null +++ b/backend/dev.sh @@ -0,0 +1,2 @@ +PORT="${PORT:-8080}" +uvicorn main:app --port $PORT --host 0.0.0.0 --forwarded-allow-ips '*' --reload \ No newline at end of file diff --git a/backend/main.py b/backend/main.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d96b001216b4a525b638412b67f0518099dbb0 --- /dev/null +++ b/backend/main.py @@ -0,0 +1,2308 @@ +import base64 +import uuid +from contextlib import asynccontextmanager + +from authlib.integrations.starlette_client import OAuth +from authlib.oidc.core import UserInfo +import json +import time +import os +import sys +import logging +import aiohttp +import requests +import mimetypes +import shutil +import os +import uuid +import inspect + +from fastapi import FastAPI, Request, Depends, status, UploadFile, File, Form +from fastapi.staticfiles import StaticFiles +from fastapi.responses import JSONResponse +from fastapi import HTTPException +from fastapi.middleware.cors import CORSMiddleware +from sqlalchemy import text +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.middleware.sessions import SessionMiddleware +from starlette.responses import StreamingResponse, Response, RedirectResponse + + +from apps.socket.main import sio, app as socket_app, get_event_emitter, get_event_call +from apps.ollama.main import ( + app as ollama_app, + get_all_models as get_ollama_models, + generate_openai_chat_completion as generate_ollama_chat_completion, +) +from apps.openai.main import ( + app as openai_app, + get_all_models as get_openai_models, + generate_chat_completion as generate_openai_chat_completion, +) + +from apps.audio.main import app as audio_app +from apps.images.main import app as images_app +from apps.rag.main import app as rag_app +from apps.webui.main import ( + app as webui_app, + get_pipe_models, + generate_function_chat_completion, +) +from apps.webui.internal.db import Session + + +from pydantic import BaseModel +from typing import List, Optional + +from apps.webui.models.auths import Auths +from apps.webui.models.models import Models +from apps.webui.models.tools import Tools +from apps.webui.models.functions import Functions +from apps.webui.models.users import Users + +from apps.webui.utils import load_toolkit_module_by_id, load_function_module_by_id + +from 
utils.utils import ( + get_admin_user, + get_verified_user, + get_current_user, + get_http_authorization_cred, + get_password_hash, + create_token, +) +from utils.task import ( + title_generation_template, + search_query_generation_template, + tools_function_calling_generation_template, +) +from utils.misc import ( + get_last_user_message, + add_or_update_system_message, + prepend_to_first_user_message_content, + parse_duration, +) + +from apps.rag.utils import get_rag_context, rag_template + +from config import ( + WEBUI_NAME, + WEBUI_URL, + WEBUI_AUTH, + ENV, + VERSION, + CHANGELOG, + FRONTEND_BUILD_DIR, + CACHE_DIR, + STATIC_DIR, + DEFAULT_LOCALE, + ENABLE_OPENAI_API, + ENABLE_OLLAMA_API, + ENABLE_MODEL_FILTER, + MODEL_FILTER_LIST, + GLOBAL_LOG_LEVEL, + SRC_LOG_LEVELS, + WEBHOOK_URL, + ENABLE_ADMIN_EXPORT, + WEBUI_BUILD_HASH, + TASK_MODEL, + TASK_MODEL_EXTERNAL, + TITLE_GENERATION_PROMPT_TEMPLATE, + SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, + SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, + TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, + SAFE_MODE, + OAUTH_PROVIDERS, + ENABLE_OAUTH_SIGNUP, + OAUTH_MERGE_ACCOUNTS_BY_EMAIL, + WEBUI_SECRET_KEY, + WEBUI_SESSION_COOKIE_SAME_SITE, + WEBUI_SESSION_COOKIE_SECURE, + AppConfig, +) + +from constants import ERROR_MESSAGES, WEBHOOK_MESSAGES, TASKS +from utils.webhook import post_webhook + +if SAFE_MODE: + print("SAFE MODE ENABLED") + Functions.deactivate_all_functions() + + +logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL) +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MAIN"]) + + +class SPAStaticFiles(StaticFiles): + async def get_response(self, path: str, scope): + try: + return await super().get_response(path, scope) + except (HTTPException, StarletteHTTPException) as ex: + if ex.status_code == 404: + return await super().get_response("index.html", scope) + else: + raise ex + + +print( + rf""" + ___ __ __ _ _ _ ___ + / _ \ _ __ ___ _ __ \ \ / /__| |__ | | | |_ _| +| | | | '_ \ / _ \ '_ \ \ \ /\ / / _ \ '_ \| | | || | +| |_| | |_) | __/ | | | \ V V / __/ |_) | |_| || | + \___/| .__/ \___|_| |_| \_/\_/ \___|_.__/ \___/|___| + |_| + + +v{VERSION} - building the best open-source AI user interface. 
+{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""} +https://github.com/open-webui/open-webui +""" +) + + +def run_migrations(): + try: + from alembic.config import Config + from alembic import command + + alembic_cfg = Config("alembic.ini") + command.upgrade(alembic_cfg, "head") + except Exception as e: + print(f"Error: {e}") + + +@asynccontextmanager +async def lifespan(app: FastAPI): + run_migrations() + yield + + +app = FastAPI( + docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan +) + +app.state.config = AppConfig() + +app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API +app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API + +app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER +app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST + +app.state.config.WEBHOOK_URL = WEBHOOK_URL + + +app.state.config.TASK_MODEL = TASK_MODEL +app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL +app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE +app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = ( + SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE +) +app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = ( + SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD +) +app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = ( + TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE +) + +app.state.MODELS = {} + +origins = ["*"] + + +################################## +# +# ChatCompletion Middleware +# +################################## + + +async def get_body_and_model_and_user(request): + # Read the original request body + body = await request.body() + body_str = body.decode("utf-8") + body = json.loads(body_str) if body_str else {} + + model_id = body["model"] + if model_id not in app.state.MODELS: + raise Exception("Model not found") + model = app.state.MODELS[model_id] + + user = get_current_user( + request, + get_http_authorization_cred(request.headers.get("Authorization")), + ) + + return body, model, user + + +def get_task_model_id(default_model_id): + # Set the task model + task_model_id = default_model_id + # Check if the user has a custom task model and use that model + if app.state.MODELS[task_model_id]["owned_by"] == "ollama": + if ( + app.state.config.TASK_MODEL + and app.state.config.TASK_MODEL in app.state.MODELS + ): + task_model_id = app.state.config.TASK_MODEL + else: + if ( + app.state.config.TASK_MODEL_EXTERNAL + and app.state.config.TASK_MODEL_EXTERNAL in app.state.MODELS + ): + task_model_id = app.state.config.TASK_MODEL_EXTERNAL + + return task_model_id + + +def get_filter_function_ids(model): + def get_priority(function_id): + function = Functions.get_function_by_id(function_id) + if function is not None and hasattr(function, "valves"): + return (function.valves if function.valves else {}).get("priority", 0) + return 0 + + filter_ids = [function.id for function in Functions.get_global_filter_functions()] + if "info" in model and "meta" in model["info"]: + filter_ids.extend(model["info"]["meta"].get("filterIds", [])) + filter_ids = list(set(filter_ids)) + + enabled_filter_ids = [ + function.id + for function in Functions.get_functions_by_type("filter", active_only=True) + ] + + filter_ids = [ + filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids + ] + + filter_ids.sort(key=get_priority) + return filter_ids + + +async def get_function_call_response( + messages, + files, + tool_id, + template, + task_model_id, + user, + __event_emitter__=None, + __event_call__=None, +): + tool = 
Tools.get_tool_by_id(tool_id) + tools_specs = json.dumps(tool.specs, indent=2) + content = tools_function_calling_generation_template(template, tools_specs) + + user_message = get_last_user_message(messages) + prompt = ( + "History:\n" + + "\n".join( + [ + f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\"" + for message in messages[::-1][:4] + ] + ) + + f"\nQuery: {user_message}" + ) + + print(prompt) + + payload = { + "model": task_model_id, + "messages": [ + {"role": "system", "content": content}, + {"role": "user", "content": f"Query: {prompt}"}, + ], + "stream": False, + "task": str(TASKS.FUNCTION_CALLING), + } + + try: + payload = filter_pipeline(payload, user) + except Exception as e: + raise e + + model = app.state.MODELS[task_model_id] + + response = None + try: + response = await generate_chat_completions(form_data=payload, user=user) + content = None + + if hasattr(response, "body_iterator"): + async for chunk in response.body_iterator: + data = json.loads(chunk.decode("utf-8")) + content = data["choices"][0]["message"]["content"] + + # Cleanup any remaining background tasks if necessary + if response.background is not None: + await response.background() + else: + content = response["choices"][0]["message"]["content"] + + if content is None: + return None, None, False + + # Parse the function response + print(f"content: {content}") + result = json.loads(content) + print(result) + + citation = None + + if "name" not in result: + return None, None, False + + # Call the function + if tool_id in webui_app.state.TOOLS: + toolkit_module = webui_app.state.TOOLS[tool_id] + else: + toolkit_module, _ = load_toolkit_module_by_id(tool_id) + webui_app.state.TOOLS[tool_id] = toolkit_module + + file_handler = False + # check if toolkit_module has file_handler self variable + if hasattr(toolkit_module, "file_handler"): + file_handler = True + print("file_handler: ", file_handler) + + if hasattr(toolkit_module, "valves") and hasattr(toolkit_module, "Valves"): + valves = Tools.get_tool_valves_by_id(tool_id) + toolkit_module.valves = toolkit_module.Valves(**(valves if valves else {})) + + function = getattr(toolkit_module, result["name"]) + function_result = None + try: + # Get the signature of the function + sig = inspect.signature(function) + params = result["parameters"] + + # Extra parameters to be passed to the function + extra_params = { + "__model__": model, + "__id__": tool_id, + "__messages__": messages, + "__files__": files, + "__event_emitter__": __event_emitter__, + "__event_call__": __event_call__, + } + + # Add extra params in contained in function signature + for key, value in extra_params.items(): + if key in sig.parameters: + params[key] = value + + if "__user__" in sig.parameters: + # Call the function with the '__user__' parameter included + __user__ = { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + } + + try: + if hasattr(toolkit_module, "UserValves"): + __user__["valves"] = toolkit_module.UserValves( + **Tools.get_user_valves_by_id_and_user_id(tool_id, user.id) + ) + except Exception as e: + print(e) + + params = {**params, "__user__": __user__} + + if inspect.iscoroutinefunction(function): + function_result = await function(**params) + else: + function_result = function(**params) + + if hasattr(toolkit_module, "citation") and toolkit_module.citation: + citation = { + "source": {"name": f"TOOL:{tool.name}/{result['name']}"}, + "document": [function_result], + "metadata": [{"source": result["name"]}], + } + except Exception as e: 
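+            # If the tool call raises, the error is only printed below and
+            # execution continues with whatever result (if any) was produced.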
+ print(e) + + # Add the function result to the system prompt + if function_result is not None: + return function_result, citation, file_handler + except Exception as e: + print(f"Error: {e}") + + return None, None, False + + +async def chat_completion_functions_handler( + body, model, user, __event_emitter__, __event_call__ +): + skip_files = None + + filter_ids = get_filter_function_ids(model) + for filter_id in filter_ids: + filter = Functions.get_function_by_id(filter_id) + if not filter: + continue + + if filter_id in webui_app.state.FUNCTIONS: + function_module = webui_app.state.FUNCTIONS[filter_id] + else: + function_module, _, _ = load_function_module_by_id(filter_id) + webui_app.state.FUNCTIONS[filter_id] = function_module + + # Check if the function has a file_handler variable + if hasattr(function_module, "file_handler"): + skip_files = function_module.file_handler + + if hasattr(function_module, "valves") and hasattr(function_module, "Valves"): + valves = Functions.get_function_valves_by_id(filter_id) + function_module.valves = function_module.Valves( + **(valves if valves else {}) + ) + + if not hasattr(function_module, "inlet"): + continue + + try: + inlet = function_module.inlet + + # Get the signature of the function + sig = inspect.signature(inlet) + params = {"body": body} + + # Extra parameters to be passed to the function + extra_params = { + "__model__": model, + "__id__": filter_id, + "__event_emitter__": __event_emitter__, + "__event_call__": __event_call__, + } + + # Add extra params in contained in function signature + for key, value in extra_params.items(): + if key in sig.parameters: + params[key] = value + + if "__user__" in sig.parameters: + __user__ = { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + } + + try: + if hasattr(function_module, "UserValves"): + __user__["valves"] = function_module.UserValves( + **Functions.get_user_valves_by_id_and_user_id( + filter_id, user.id + ) + ) + except Exception as e: + print(e) + + params = {**params, "__user__": __user__} + + if inspect.iscoroutinefunction(inlet): + body = await inlet(**params) + else: + body = inlet(**params) + + except Exception as e: + print(f"Error: {e}") + raise e + + if skip_files: + if "files" in body: + del body["files"] + + return body, {} + + +async def chat_completion_tools_handler(body, user, __event_emitter__, __event_call__): + skip_files = None + + contexts = [] + citations = None + + task_model_id = get_task_model_id(body["model"]) + + # If tool_ids field is present, call the functions + if "tool_ids" in body: + print(body["tool_ids"]) + for tool_id in body["tool_ids"]: + print(tool_id) + try: + response, citation, file_handler = await get_function_call_response( + messages=body["messages"], + files=body.get("files", []), + tool_id=tool_id, + template=app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, + task_model_id=task_model_id, + user=user, + __event_emitter__=__event_emitter__, + __event_call__=__event_call__, + ) + + print(file_handler) + if isinstance(response, str): + contexts.append(response) + + if citation: + if citations is None: + citations = [citation] + else: + citations.append(citation) + + if file_handler: + skip_files = True + + except Exception as e: + print(f"Error: {e}") + del body["tool_ids"] + print(f"tool_contexts: {contexts}") + + if skip_files: + if "files" in body: + del body["files"] + + return body, { + **({"contexts": contexts} if contexts is not None else {}), + **({"citations": citations} if citations is not None 
else {}),
+    }
+
+
+async def chat_completion_files_handler(body):
+    contexts = []
+    citations = None
+
+    if "files" in body:
+        files = body["files"]
+        del body["files"]
+
+        contexts, citations = get_rag_context(
+            files=files,
+            messages=body["messages"],
+            embedding_function=rag_app.state.EMBEDDING_FUNCTION,
+            k=rag_app.state.config.TOP_K,
+            reranking_function=rag_app.state.sentence_transformer_rf,
+            r=rag_app.state.config.RELEVANCE_THRESHOLD,
+            hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
+        )
+
+        log.debug(f"rag_contexts: {contexts}, citations: {citations}")
+
+    return body, {
+        **({"contexts": contexts} if contexts is not None else {}),
+        **({"citations": citations} if citations is not None else {}),
+    }
+
+
+class ChatCompletionMiddleware(BaseHTTPMiddleware):
+    async def dispatch(self, request: Request, call_next):
+        if request.method == "POST" and any(
+            endpoint in request.url.path
+            for endpoint in ["/ollama/api/chat", "/chat/completions"]
+        ):
+            log.debug(f"request.url.path: {request.url.path}")
+
+            try:
+                body, model, user = await get_body_and_model_and_user(request)
+            except Exception as e:
+                return JSONResponse(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    content={"detail": str(e)},
+                )
+
+            # Extract valves from the request body
+            valves = None
+            if "valves" in body:
+                valves = body["valves"]
+                del body["valves"]
+
+            # Extract session_id, chat_id and message_id from the request body
+            session_id = None
+            if "session_id" in body:
+                session_id = body["session_id"]
+                del body["session_id"]
+            chat_id = None
+            if "chat_id" in body:
+                chat_id = body["chat_id"]
+                del body["chat_id"]
+            message_id = None
+            if "id" in body:
+                message_id = body["id"]
+                del body["id"]
+
+            __event_emitter__ = await get_event_emitter(
+                {"chat_id": chat_id, "message_id": message_id, "session_id": session_id}
+            )
+            __event_call__ = await get_event_call(
+                {"chat_id": chat_id, "message_id": message_id, "session_id": session_id}
+            )
+
+            # Initialize data_items to store additional data to be sent to the client
+            data_items = []
+
+            # Initialize contexts and citations
+            contexts = []
+            citations = []
+
+            try:
+                body, flags = await chat_completion_functions_handler(
+                    body, model, user, __event_emitter__, __event_call__
+                )
+            except Exception as e:
+                return JSONResponse(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    content={"detail": str(e)},
+                )
+
+            try:
+                body, flags = await chat_completion_tools_handler(
+                    body, user, __event_emitter__, __event_call__
+                )
+
+                contexts.extend(flags.get("contexts", []))
+                citations.extend(flags.get("citations", []))
+            except Exception as e:
+                print(e)
+                pass
+
+            try:
+                body, flags = await chat_completion_files_handler(body)
+
+                contexts.extend(flags.get("contexts", []))
+                citations.extend(flags.get("citations", []))
+            except Exception as e:
+                print(e)
+                pass
+
+            # If context is not empty, insert it into the messages
+            if len(contexts) > 0:
+                context_string = "\n".join(contexts).strip()
+                prompt = get_last_user_message(body["messages"])
+
+                # Workaround for Ollama 2.0+ system prompt issue
+                # TODO: replace with add_or_update_system_message
+                if model["owned_by"] == "ollama":
+                    body["messages"] = prepend_to_first_user_message_content(
+                        rag_template(
+                            rag_app.state.config.RAG_TEMPLATE, context_string, prompt
+                        ),
+                        body["messages"],
+                    )
+                else:
+                    body["messages"] = add_or_update_system_message(
+                        rag_template(
+                            rag_app.state.config.RAG_TEMPLATE, context_string, prompt
+                        ),
+                        body["messages"],
+                    )
+
+            # If there are citations, add them to the data_items
+            if 
len(citations) > 0: + data_items.append({"citations": citations}) + + body["metadata"] = { + "session_id": session_id, + "chat_id": chat_id, + "message_id": message_id, + "valves": valves, + } + + modified_body_bytes = json.dumps(body).encode("utf-8") + # Replace the request body with the modified one + request._body = modified_body_bytes + # Set custom header to ensure content-length matches new body length + request.headers.__dict__["_list"] = [ + (b"content-length", str(len(modified_body_bytes)).encode("utf-8")), + *[ + (k, v) + for k, v in request.headers.raw + if k.lower() != b"content-length" + ], + ] + + response = await call_next(request) + if isinstance(response, StreamingResponse): + # If it's a streaming response, inject it as SSE event or NDJSON line + content_type = response.headers.get("Content-Type") + if "text/event-stream" in content_type: + return StreamingResponse( + self.openai_stream_wrapper(response.body_iterator, data_items), + ) + if "application/x-ndjson" in content_type: + return StreamingResponse( + self.ollama_stream_wrapper(response.body_iterator, data_items), + ) + + return response + else: + return response + + # If it's not a chat completion request, just pass it through + response = await call_next(request) + return response + + async def _receive(self, body: bytes): + return {"type": "http.request", "body": body, "more_body": False} + + async def openai_stream_wrapper(self, original_generator, data_items): + for item in data_items: + yield f"data: {json.dumps(item)}\n\n" + + async for data in original_generator: + yield data + + async def ollama_stream_wrapper(self, original_generator, data_items): + for item in data_items: + yield f"{json.dumps(item)}\n" + + async for data in original_generator: + yield data + + +app.add_middleware(ChatCompletionMiddleware) + +################################## +# +# Pipeline Middleware +# +################################## + + +def get_sorted_filters(model_id): + filters = [ + model + for model in app.state.MODELS.values() + if "pipeline" in model + and "type" in model["pipeline"] + and model["pipeline"]["type"] == "filter" + and ( + model["pipeline"]["pipelines"] == ["*"] + or any( + model_id == target_model_id + for target_model_id in model["pipeline"]["pipelines"] + ) + ) + ] + sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"]) + return sorted_filters + + +def filter_pipeline(payload, user): + user = {"id": user.id, "email": user.email, "name": user.name, "role": user.role} + model_id = payload["model"] + sorted_filters = get_sorted_filters(model_id) + + model = app.state.MODELS[model_id] + + if "pipeline" in model: + sorted_filters.append(model) + + for filter in sorted_filters: + r = None + try: + urlIdx = filter["urlIdx"] + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + if key != "": + headers = {"Authorization": f"Bearer {key}"} + r = requests.post( + f"{url}/{filter['id']}/filter/inlet", + headers=headers, + json={ + "user": user, + "body": payload, + }, + ) + + r.raise_for_status() + payload = r.json() + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + if r is not None: + res = r.json() + if "detail" in res: + raise Exception(r.status_code, res["detail"]) + + return payload + + +class PipelineMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + if request.method == "POST" and ( + "/ollama/api/chat" in request.url.path + or 
"/chat/completions" in request.url.path + ): + log.debug(f"request.url.path: {request.url.path}") + + # Read the original request body + body = await request.body() + # Decode body to string + body_str = body.decode("utf-8") + # Parse string to JSON + data = json.loads(body_str) if body_str else {} + + user = get_current_user( + request, + get_http_authorization_cred(request.headers.get("Authorization")), + ) + + try: + data = filter_pipeline(data, user) + except Exception as e: + return JSONResponse( + status_code=e.args[0], + content={"detail": e.args[1]}, + ) + + modified_body_bytes = json.dumps(data).encode("utf-8") + # Replace the request body with the modified one + request._body = modified_body_bytes + # Set custom header to ensure content-length matches new body length + request.headers.__dict__["_list"] = [ + (b"content-length", str(len(modified_body_bytes)).encode("utf-8")), + *[ + (k, v) + for k, v in request.headers.raw + if k.lower() != b"content-length" + ], + ] + + response = await call_next(request) + return response + + async def _receive(self, body: bytes): + return {"type": "http.request", "body": body, "more_body": False} + + +app.add_middleware(PipelineMiddleware) + + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.middleware("http") +async def commit_session_after_request(request: Request, call_next): + response = await call_next(request) + log.debug("Commit session after request") + Session.commit() + return response + + +@app.middleware("http") +async def check_url(request: Request, call_next): + if len(app.state.MODELS) == 0: + await get_all_models() + else: + pass + + start_time = int(time.time()) + response = await call_next(request) + process_time = int(time.time()) - start_time + response.headers["X-Process-Time"] = str(process_time) + + return response + + +@app.middleware("http") +async def update_embedding_function(request: Request, call_next): + response = await call_next(request) + if "/embedding/update" in request.url.path: + webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION + return response + + +app.mount("/ws", socket_app) + +app.mount("/ollama", ollama_app) +app.mount("/openai", openai_app) + +app.mount("/images/api/v1", images_app) +app.mount("/audio/api/v1", audio_app) +app.mount("/rag/api/v1", rag_app) + +app.mount("/api/v1", webui_app) + +webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION + + +async def get_all_models(): + # TODO: Optimize this function + pipe_models = [] + openai_models = [] + ollama_models = [] + + pipe_models = await get_pipe_models() + + if app.state.config.ENABLE_OPENAI_API: + openai_models = await get_openai_models() + openai_models = openai_models["data"] + + if app.state.config.ENABLE_OLLAMA_API: + ollama_models = await get_ollama_models() + ollama_models = [ + { + "id": model["model"], + "name": model["name"], + "object": "model", + "created": int(time.time()), + "owned_by": "ollama", + "ollama": model, + } + for model in ollama_models["models"] + ] + + models = pipe_models + openai_models + ollama_models + + global_action_ids = [ + function.id for function in Functions.get_global_action_functions() + ] + enabled_action_ids = [ + function.id + for function in Functions.get_functions_by_type("action", active_only=True) + ] + + custom_models = Models.get_all_models() + for custom_model in custom_models: + if custom_model.base_model_id == None: + for model in models: + if ( + 
custom_model.id == model["id"] + or custom_model.id == model["id"].split(":")[0] + ): + model["name"] = custom_model.name + model["info"] = custom_model.model_dump() + + action_ids = [] + if "info" in model and "meta" in model["info"]: + action_ids.extend(model["info"]["meta"].get("actionIds", [])) + + model["action_ids"] = action_ids + else: + owned_by = "openai" + pipe = None + action_ids = [] + + for model in models: + if ( + custom_model.base_model_id == model["id"] + or custom_model.base_model_id == model["id"].split(":")[0] + ): + owned_by = model["owned_by"] + if "pipe" in model: + pipe = model["pipe"] + + if "info" in model and "meta" in model["info"]: + action_ids.extend(model["info"]["meta"].get("actionIds", [])) + break + + models.append( + { + "id": custom_model.id, + "name": custom_model.name, + "object": "model", + "created": custom_model.created_at, + "owned_by": owned_by, + "info": custom_model.model_dump(), + "preset": True, + **({"pipe": pipe} if pipe is not None else {}), + "action_ids": action_ids, + } + ) + + for model in models: + action_ids = [] + if "action_ids" in model: + action_ids = model["action_ids"] + del model["action_ids"] + + action_ids = action_ids + global_action_ids + action_ids = list(set(action_ids)) + action_ids = [ + action_id for action_id in action_ids if action_id in enabled_action_ids + ] + + model["actions"] = [] + for action_id in action_ids: + action = Functions.get_function_by_id(action_id) + model["actions"].append( + { + "id": action_id, + "name": action.name, + "description": action.meta.description, + "icon_url": action.meta.manifest.get("icon_url", None), + } + ) + + app.state.MODELS = {model["id"]: model for model in models} + webui_app.state.MODELS = app.state.MODELS + + return models + + +@app.get("/api/models") +async def get_models(user=Depends(get_verified_user)): + models = await get_all_models() + + # Filter out filter pipelines + models = [ + model + for model in models + if "pipeline" not in model or model["pipeline"].get("type", None) != "filter" + ] + + if app.state.config.ENABLE_MODEL_FILTER: + if user.role == "user": + models = list( + filter( + lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST, + models, + ) + ) + return {"data": models} + + return {"data": models} + + +@app.post("/api/chat/completions") +async def generate_chat_completions(form_data: dict, user=Depends(get_verified_user)): + model_id = form_data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + model = app.state.MODELS[model_id] + + # `task` field is used to determine the type of the request, e.g. `title_generation`, `query_generation`, etc. 
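+    # Illustrative example (hypothetical values): an internal title-generation
+    # request might send {"model": "llama3", "task": "title_generation", "messages": [...]};
+    # the task value is moved into form_data["metadata"] before dispatching below.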
+ task = None + if "task" in form_data: + task = form_data["task"] + del form_data["task"] + + if task: + if "metadata" in form_data: + form_data["metadata"]["task"] = task + else: + form_data["metadata"] = {"task": task} + + if model.get("pipe"): + return await generate_function_chat_completion(form_data, user=user) + if model["owned_by"] == "ollama": + print("generate_ollama_chat_completion") + return await generate_ollama_chat_completion(form_data, user=user) + else: + return await generate_openai_chat_completion(form_data, user=user) + + +@app.post("/api/chat/completed") +async def chat_completed(form_data: dict, user=Depends(get_verified_user)): + data = form_data + model_id = data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + model = app.state.MODELS[model_id] + + sorted_filters = get_sorted_filters(model_id) + if "pipeline" in model: + sorted_filters = [model] + sorted_filters + + for filter in sorted_filters: + r = None + try: + urlIdx = filter["urlIdx"] + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + if key != "": + headers = {"Authorization": f"Bearer {key}"} + r = requests.post( + f"{url}/{filter['id']}/filter/outlet", + headers=headers, + json={ + "user": { + "id": user.id, + "name": user.name, + "email": user.email, + "role": user.role, + }, + "body": data, + }, + ) + + r.raise_for_status() + data = r.json() + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + if r is not None: + try: + res = r.json() + if "detail" in res: + return JSONResponse( + status_code=r.status_code, + content=res, + ) + except: + pass + + else: + pass + + __event_emitter__ = await get_event_emitter( + { + "chat_id": data["chat_id"], + "message_id": data["id"], + "session_id": data["session_id"], + } + ) + + __event_call__ = await get_event_call( + { + "chat_id": data["chat_id"], + "message_id": data["id"], + "session_id": data["session_id"], + } + ) + + def get_priority(function_id): + function = Functions.get_function_by_id(function_id) + if function is not None and hasattr(function, "valves"): + return (function.valves if function.valves else {}).get("priority", 0) + return 0 + + filter_ids = [function.id for function in Functions.get_global_filter_functions()] + if "info" in model and "meta" in model["info"]: + filter_ids.extend(model["info"]["meta"].get("filterIds", [])) + filter_ids = list(set(filter_ids)) + + enabled_filter_ids = [ + function.id + for function in Functions.get_functions_by_type("filter", active_only=True) + ] + filter_ids = [ + filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids + ] + + # Sort filter_ids by priority, using the get_priority function + filter_ids.sort(key=get_priority) + + for filter_id in filter_ids: + filter = Functions.get_function_by_id(filter_id) + if not filter: + continue + + if filter_id in webui_app.state.FUNCTIONS: + function_module = webui_app.state.FUNCTIONS[filter_id] + else: + function_module, _, _ = load_function_module_by_id(filter_id) + webui_app.state.FUNCTIONS[filter_id] = function_module + + if hasattr(function_module, "valves") and hasattr(function_module, "Valves"): + valves = Functions.get_function_valves_by_id(filter_id) + function_module.valves = function_module.Valves( + **(valves if valves else {}) + ) + + if not hasattr(function_module, "outlet"): + continue + try: + outlet = function_module.outlet + + 
# Get the signature of the function + sig = inspect.signature(outlet) + params = {"body": data} + + # Extra parameters to be passed to the function + extra_params = { + "__model__": model, + "__id__": filter_id, + "__event_emitter__": __event_emitter__, + "__event_call__": __event_call__, + } + + # Add extra params in contained in function signature + for key, value in extra_params.items(): + if key in sig.parameters: + params[key] = value + + if "__user__" in sig.parameters: + __user__ = { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + } + + try: + if hasattr(function_module, "UserValves"): + __user__["valves"] = function_module.UserValves( + **Functions.get_user_valves_by_id_and_user_id( + filter_id, user.id + ) + ) + except Exception as e: + print(e) + + params = {**params, "__user__": __user__} + + if inspect.iscoroutinefunction(outlet): + data = await outlet(**params) + else: + data = outlet(**params) + + except Exception as e: + print(f"Error: {e}") + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content={"detail": str(e)}, + ) + + return data + + +@app.post("/api/chat/actions/{action_id}") +async def chat_completed( + action_id: str, form_data: dict, user=Depends(get_verified_user) +): + action = Functions.get_function_by_id(action_id) + if not action: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Action not found", + ) + + data = form_data + model_id = data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + model = app.state.MODELS[model_id] + + __event_emitter__ = await get_event_emitter( + { + "chat_id": data["chat_id"], + "message_id": data["id"], + "session_id": data["session_id"], + } + ) + __event_call__ = await get_event_call( + { + "chat_id": data["chat_id"], + "message_id": data["id"], + "session_id": data["session_id"], + } + ) + + if action_id in webui_app.state.FUNCTIONS: + function_module = webui_app.state.FUNCTIONS[action_id] + else: + function_module, _, _ = load_function_module_by_id(action_id) + webui_app.state.FUNCTIONS[action_id] = function_module + + if hasattr(function_module, "valves") and hasattr(function_module, "Valves"): + valves = Functions.get_function_valves_by_id(action_id) + function_module.valves = function_module.Valves(**(valves if valves else {})) + + if hasattr(function_module, "action"): + try: + action = function_module.action + + # Get the signature of the function + sig = inspect.signature(action) + params = {"body": data} + + # Extra parameters to be passed to the function + extra_params = { + "__model__": model, + "__id__": action_id, + "__event_emitter__": __event_emitter__, + "__event_call__": __event_call__, + } + + # Add extra params in contained in function signature + for key, value in extra_params.items(): + if key in sig.parameters: + params[key] = value + + if "__user__" in sig.parameters: + __user__ = { + "id": user.id, + "email": user.email, + "name": user.name, + "role": user.role, + } + + try: + if hasattr(function_module, "UserValves"): + __user__["valves"] = function_module.UserValves( + **Functions.get_user_valves_by_id_and_user_id( + action_id, user.id + ) + ) + except Exception as e: + print(e) + + params = {**params, "__user__": __user__} + + if inspect.iscoroutinefunction(action): + data = await action(**params) + else: + data = action(**params) + + except Exception as e: + print(f"Error: {e}") + return JSONResponse( + 
status_code=status.HTTP_400_BAD_REQUEST, + content={"detail": str(e)}, + ) + + return data + + +################################## +# +# Task Endpoints +# +################################## + + +# TODO: Refactor task API endpoints below into a separate file + + +@app.get("/api/task/config") +async def get_task_config(user=Depends(get_verified_user)): + return { + "TASK_MODEL": app.state.config.TASK_MODEL, + "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL, + "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE, + "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, + "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, + "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, + } + + +class TaskConfigForm(BaseModel): + TASK_MODEL: Optional[str] + TASK_MODEL_EXTERNAL: Optional[str] + TITLE_GENERATION_PROMPT_TEMPLATE: str + SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: str + SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: int + TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE: str + + +@app.post("/api/task/config/update") +async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_user)): + app.state.config.TASK_MODEL = form_data.TASK_MODEL + app.state.config.TASK_MODEL_EXTERNAL = form_data.TASK_MODEL_EXTERNAL + app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = ( + form_data.TITLE_GENERATION_PROMPT_TEMPLATE + ) + app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = ( + form_data.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE + ) + app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = ( + form_data.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD + ) + app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = ( + form_data.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE + ) + + return { + "TASK_MODEL": app.state.config.TASK_MODEL, + "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL, + "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE, + "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, + "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, + "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, + } + + +@app.post("/api/task/title/completions") +async def generate_title(form_data: dict, user=Depends(get_verified_user)): + print("generate_title") + + model_id = form_data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + + # Check if the user has a custom task model + # If the user has a custom task model, use that model + model_id = get_task_model_id(model_id) + + print(model_id) + + template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE + + content = title_generation_template( + template, + form_data["prompt"], + { + "name": user.name, + "location": user.info.get("location") if user.info else None, + }, + ) + + payload = { + "model": model_id, + "messages": [{"role": "user", "content": content}], + "stream": False, + "max_tokens": 50, + "chat_id": form_data.get("chat_id", None), + "task": str(TASKS.TITLE_GENERATION), + } + + log.debug(payload) + + try: + payload = filter_pipeline(payload, user) + except Exception as e: + return JSONResponse( + status_code=e.args[0], + content={"detail": e.args[1]}, + ) + + if "chat_id" in payload: + del payload["chat_id"] + + 
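+    # Hand the task payload to the shared chat completion entry point so it is
+    # routed to the appropriate backend (Ollama, OpenAI, or a function pipe).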
return await generate_chat_completions(form_data=payload, user=user) + + +@app.post("/api/task/query/completions") +async def generate_search_query(form_data: dict, user=Depends(get_verified_user)): + print("generate_search_query") + + if len(form_data["prompt"]) < app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Skip search query generation for short prompts (< {app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD} characters)", + ) + + model_id = form_data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + + # Check if the user has a custom task model + # If the user has a custom task model, use that model + model_id = get_task_model_id(model_id) + + print(model_id) + + template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE + + content = search_query_generation_template( + template, form_data["prompt"], {"name": user.name} + ) + + payload = { + "model": model_id, + "messages": [{"role": "user", "content": content}], + "stream": False, + "max_tokens": 30, + "task": str(TASKS.QUERY_GENERATION), + } + + print(payload) + + try: + payload = filter_pipeline(payload, user) + except Exception as e: + return JSONResponse( + status_code=e.args[0], + content={"detail": e.args[1]}, + ) + + if "chat_id" in payload: + del payload["chat_id"] + + return await generate_chat_completions(form_data=payload, user=user) + + +@app.post("/api/task/emoji/completions") +async def generate_emoji(form_data: dict, user=Depends(get_verified_user)): + print("generate_emoji") + + model_id = form_data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + + # Check if the user has a custom task model + # If the user has a custom task model, use that model + model_id = get_task_model_id(model_id) + + print(model_id) + + template = ''' +Your task is to reflect the speaker's likely facial expression through a fitting emoji. Interpret emotions from the message and reflect their facial expression using fitting, diverse emojis (e.g., 😊, 😢, 😡, 😱). 
+ +Message: """{{prompt}}""" +''' + + content = title_generation_template( + template, + form_data["prompt"], + { + "name": user.name, + "location": user.info.get("location") if user.info else None, + }, + ) + + payload = { + "model": model_id, + "messages": [{"role": "user", "content": content}], + "stream": False, + "max_tokens": 4, + "chat_id": form_data.get("chat_id", None), + "task": str(TASKS.EMOJI_GENERATION), + } + + log.debug(payload) + + try: + payload = filter_pipeline(payload, user) + except Exception as e: + return JSONResponse( + status_code=e.args[0], + content={"detail": e.args[1]}, + ) + + if "chat_id" in payload: + del payload["chat_id"] + + return await generate_chat_completions(form_data=payload, user=user) + + +@app.post("/api/task/tools/completions") +async def get_tools_function_calling(form_data: dict, user=Depends(get_verified_user)): + print("get_tools_function_calling") + + model_id = form_data["model"] + if model_id not in app.state.MODELS: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Model not found", + ) + + # Check if the user has a custom task model + # If the user has a custom task model, use that model + model_id = get_task_model_id(model_id) + + print(model_id) + template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE + + try: + context, _, _ = await get_function_call_response( + form_data["messages"], + form_data.get("files", []), + form_data["tool_id"], + template, + model_id, + user, + ) + return context + except Exception as e: + return JSONResponse( + status_code=e.args[0], + content={"detail": e.args[1]}, + ) + + +################################## +# +# Pipelines Endpoints +# +################################## + + +# TODO: Refactor pipelines API endpoints below into a separate file + + +@app.get("/api/pipelines/list") +async def get_pipelines_list(user=Depends(get_admin_user)): + responses = await get_openai_models(raw=True) + + print(responses) + urlIdxs = [ + idx + for idx, response in enumerate(responses) + if response != None and "pipelines" in response + ] + + return { + "data": [ + { + "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx], + "idx": urlIdx, + } + for urlIdx in urlIdxs + ] + } + + +@app.post("/api/pipelines/upload") +async def upload_pipeline( + urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user) +): + print("upload_pipeline", urlIdx, file.filename) + # Check if the uploaded file is a python file + if not file.filename.endswith(".py"): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Only Python (.py) files are allowed.", + ) + + upload_folder = f"{CACHE_DIR}/pipelines" + os.makedirs(upload_folder, exist_ok=True) + file_path = os.path.join(upload_folder, file.filename) + + r = None + try: + # Save the uploaded file + with open(file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + + with open(file_path, "rb") as f: + files = {"file": f} + r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + status_code = status.HTTP_404_NOT_FOUND + if r is not None: + status_code = r.status_code + try: + res = r.json() + if "detail" in res: + detail = 
res["detail"] + except: + pass + + raise HTTPException( + status_code=status_code, + detail=detail, + ) + finally: + # Ensure the file is deleted after the upload is completed or on failure + if os.path.exists(file_path): + os.remove(file_path) + + +class AddPipelineForm(BaseModel): + url: str + urlIdx: int + + +@app.post("/api/pipelines/add") +async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)): + + r = None + try: + urlIdx = form_data.urlIdx + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.post( + f"{url}/pipelines/add", headers=headers, json={"url": form_data.url} + ) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + if r is not None: + try: + res = r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +class DeletePipelineForm(BaseModel): + id: str + urlIdx: int + + +@app.delete("/api/pipelines/delete") +async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)): + + r = None + try: + urlIdx = form_data.urlIdx + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.delete( + f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id} + ) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + if r is not None: + try: + res = r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +@app.get("/api/pipelines") +async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)): + r = None + try: + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.get(f"{url}/pipelines", headers=headers) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + if r is not None: + try: + res = r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +@app.get("/api/pipelines/{pipeline_id}/valves") +async def get_pipeline_valves( + urlIdx: Optional[int], + pipeline_id: str, + user=Depends(get_admin_user), +): + models = await get_all_models() + r = None + try: + + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + + if r is not None: + try: + res 
= r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +@app.get("/api/pipelines/{pipeline_id}/valves/spec") +async def get_pipeline_valves_spec( + urlIdx: Optional[int], + pipeline_id: str, + user=Depends(get_admin_user), +): + models = await get_all_models() + + r = None + try: + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + if r is not None: + try: + res = r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +@app.post("/api/pipelines/{pipeline_id}/valves/update") +async def update_pipeline_valves( + urlIdx: Optional[int], + pipeline_id: str, + form_data: dict, + user=Depends(get_admin_user), +): + models = await get_all_models() + + r = None + try: + url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx] + key = openai_app.state.config.OPENAI_API_KEYS[urlIdx] + + headers = {"Authorization": f"Bearer {key}"} + r = requests.post( + f"{url}/{pipeline_id}/valves/update", + headers=headers, + json={**form_data}, + ) + + r.raise_for_status() + data = r.json() + + return {**data} + except Exception as e: + # Handle connection error here + print(f"Connection error: {e}") + + detail = "Pipeline not found" + + if r is not None: + try: + res = r.json() + if "detail" in res: + detail = res["detail"] + except: + pass + + raise HTTPException( + status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND), + detail=detail, + ) + + +################################## +# +# Config Endpoints +# +################################## + + +@app.get("/api/config") +async def get_app_config(): + return { + "status": True, + "name": WEBUI_NAME, + "version": VERSION, + "default_locale": str(DEFAULT_LOCALE), + "default_models": webui_app.state.config.DEFAULT_MODELS, + "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS, + "features": { + "auth": WEBUI_AUTH, + "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER), + "enable_signup": webui_app.state.config.ENABLE_SIGNUP, + "enable_login_form": webui_app.state.config.ENABLE_LOGIN_FORM, + "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH, + "enable_image_generation": images_app.state.config.ENABLED, + "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING, + "enable_admin_export": ENABLE_ADMIN_EXPORT, + }, + "audio": { + "tts": { + "engine": audio_app.state.config.TTS_ENGINE, + "voice": audio_app.state.config.TTS_VOICE, + }, + "stt": { + "engine": audio_app.state.config.STT_ENGINE, + }, + }, + "oauth": { + "providers": { + name: config.get("name", name) + for name, config in OAUTH_PROVIDERS.items() + } + }, + } + + +@app.get("/api/config/model/filter") +async def get_model_filter_config(user=Depends(get_admin_user)): + return { + "enabled": app.state.config.ENABLE_MODEL_FILTER, + "models": app.state.config.MODEL_FILTER_LIST, + } + + +class ModelFilterConfigForm(BaseModel): + enabled: bool + models: 
List[str] + + +@app.post("/api/config/model/filter") +async def update_model_filter_config( + form_data: ModelFilterConfigForm, user=Depends(get_admin_user) +): + app.state.config.ENABLE_MODEL_FILTER = form_data.enabled + app.state.config.MODEL_FILTER_LIST = form_data.models + + return { + "enabled": app.state.config.ENABLE_MODEL_FILTER, + "models": app.state.config.MODEL_FILTER_LIST, + } + + +# TODO: webhook endpoint should be under config endpoints + + +@app.get("/api/webhook") +async def get_webhook_url(user=Depends(get_admin_user)): + return { + "url": app.state.config.WEBHOOK_URL, + } + + +class UrlForm(BaseModel): + url: str + + +@app.post("/api/webhook") +async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)): + app.state.config.WEBHOOK_URL = form_data.url + webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL + return {"url": app.state.config.WEBHOOK_URL} + + +@app.get("/api/version") +async def get_app_config(): + return { + "version": VERSION, + } + + +@app.get("/api/changelog") +async def get_app_changelog(): + return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5} + + +@app.get("/api/version/updates") +async def get_app_latest_release_version(): + try: + async with aiohttp.ClientSession(trust_env=True) as session: + async with session.get( + "https://api.github.com/repos/open-webui/open-webui/releases/latest" + ) as response: + response.raise_for_status() + data = await response.json() + latest_version = data["tag_name"] + + return {"current": VERSION, "latest": latest_version[1:]} + except aiohttp.ClientError as e: + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED, + ) + + +############################ +# OAuth Login & Callback +############################ + +oauth = OAuth() + +for provider_name, provider_config in OAUTH_PROVIDERS.items(): + oauth.register( + name=provider_name, + client_id=provider_config["client_id"], + client_secret=provider_config["client_secret"], + server_metadata_url=provider_config["server_metadata_url"], + client_kwargs={ + "scope": provider_config["scope"], + }, + ) + +# SessionMiddleware is used by authlib for oauth +if len(OAUTH_PROVIDERS) > 0: + app.add_middleware( + SessionMiddleware, + secret_key=WEBUI_SECRET_KEY, + session_cookie="oui-session", + same_site=WEBUI_SESSION_COOKIE_SAME_SITE, + https_only=WEBUI_SESSION_COOKIE_SECURE, + ) + + +@app.get("/oauth/{provider}/login") +async def oauth_login(provider: str, request: Request): + if provider not in OAUTH_PROVIDERS: + raise HTTPException(404) + redirect_uri = request.url_for("oauth_callback", provider=provider) + return await oauth.create_client(provider).authorize_redirect(request, redirect_uri) + + +# OAuth login logic is as follows: +# 1. Attempt to find a user with matching subject ID, tied to the provider +# 2. If OAUTH_MERGE_ACCOUNTS_BY_EMAIL is true, find a user with the email address provided via OAuth +# - This is considered insecure in general, as OAuth providers do not always verify email addresses +# 3. 
If there is no user, and ENABLE_OAUTH_SIGNUP is true, create a user
+#    - Email addresses are considered unique, so we fail registration if the email address is already taken
+@app.get("/oauth/{provider}/callback")
+async def oauth_callback(provider: str, request: Request, response: Response):
+    if provider not in OAUTH_PROVIDERS:
+        raise HTTPException(404)
+    client = oauth.create_client(provider)
+    try:
+        token = await client.authorize_access_token(request)
+    except Exception as e:
+        log.warning(f"OAuth callback error: {e}")
+        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
+    user_data: UserInfo = token["userinfo"]
+
+    sub = user_data.get("sub")
+    if not sub:
+        log.warning(f"OAuth callback failed, sub is missing: {user_data}")
+        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
+    provider_sub = f"{provider}@{sub}"
+    email = user_data.get("email", "").lower()
+    # We currently mandate that email addresses are provided
+    if not email:
+        log.warning(f"OAuth callback failed, email is missing: {user_data}")
+        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
+
+    # Check if the user exists
+    user = Users.get_user_by_oauth_sub(provider_sub)
+
+    if not user:
+        # If the user does not exist, check if merging is enabled
+        if OAUTH_MERGE_ACCOUNTS_BY_EMAIL.value:
+            # Check if the user exists by email
+            user = Users.get_user_by_email(email)
+            if user:
+                # Update the user with the new oauth sub
+                Users.update_user_oauth_sub_by_id(user.id, provider_sub)
+
+    if not user:
+        # If the user does not exist, check if signups are enabled
+        if ENABLE_OAUTH_SIGNUP.value:
+            # Check if an existing user with the same email already exists
+            existing_user = Users.get_user_by_email(user_data.get("email", "").lower())
+            if existing_user:
+                raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
+
+            picture_claim = webui_app.state.config.OAUTH_PICTURE_CLAIM
+            picture_url = user_data.get(picture_claim, "")
+            if picture_url:
+                # Download the profile image into a base64 string
+                try:
+                    async with aiohttp.ClientSession() as session:
+                        async with session.get(picture_url) as resp:
+                            picture = await resp.read()
+                            base64_encoded_picture = base64.b64encode(picture).decode(
+                                "utf-8"
+                            )
+                            guessed_mime_type = mimetypes.guess_type(picture_url)[0]
+                            if guessed_mime_type is None:
+                                # assume JPG, browsers are tolerant enough of image formats
+                                guessed_mime_type = "image/jpeg"
+                            picture_url = f"data:{guessed_mime_type};base64,{base64_encoded_picture}"
+                except Exception as e:
+                    log.error(f"Error downloading profile image '{picture_url}': {e}")
+                    picture_url = ""
+            if not picture_url:
+                picture_url = "/user.png"
+            username_claim = webui_app.state.config.OAUTH_USERNAME_CLAIM
+            role = (
+                "admin"
+                if Users.get_num_users() == 0
+                else webui_app.state.config.DEFAULT_USER_ROLE
+            )
+            user = Auths.insert_new_auth(
+                email=email,
+                password=get_password_hash(
+                    str(uuid.uuid4())
+                ),  # Random password, not used
+                name=user_data.get(username_claim, "User"),
+                profile_image_url=picture_url,
+                role=role,
+                oauth_sub=provider_sub,
+            )
+
+            if webui_app.state.config.WEBHOOK_URL:
+                post_webhook(
+                    webui_app.state.config.WEBHOOK_URL,
+                    WEBHOOK_MESSAGES.USER_SIGNUP(user.name),
+                    {
+                        "action": "signup",
+                        "message": WEBHOOK_MESSAGES.USER_SIGNUP(user.name),
+                        "user": user.model_dump_json(exclude_none=True),
+                    },
+                )
+        else:
+            raise HTTPException(
+                status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
+            )
+
+    jwt_token = create_token(
+        data={"id": user.id},
+
expires_delta=parse_duration(webui_app.state.config.JWT_EXPIRES_IN), + ) + + # Set the cookie token + response.set_cookie( + key="token", + value=jwt_token, + httponly=True, # Ensures the cookie is not accessible via JavaScript + ) + + # Redirect back to the frontend with the JWT token + redirect_url = f"{request.base_url}auth#token={jwt_token}" + return RedirectResponse(url=redirect_url) + + +@app.get("/manifest.json") +async def get_manifest_json(): + return { + "name": WEBUI_NAME, + "short_name": WEBUI_NAME, + "start_url": "/", + "display": "standalone", + "background_color": "#343541", + "orientation": "portrait-primary", + "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}], + } + + +@app.get("/opensearch.xml") +async def get_opensearch_xml(): + xml_content = rf""" + + {WEBUI_NAME} + Search {WEBUI_NAME} + UTF-8 + {WEBUI_URL}/static/favicon.png + + {WEBUI_URL} + + """ + return Response(content=xml_content, media_type="application/xml") + + +@app.get("/health") +async def healthcheck(): + return {"status": True} + + +@app.get("/health/db") +async def healthcheck_with_db(): + Session.execute(text("SELECT 1;")).all() + return {"status": True} + + +app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static") +app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache") + +if os.path.exists(FRONTEND_BUILD_DIR): + mimetypes.add_type("text/javascript", ".js") + app.mount( + "/", + SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True), + name="spa-static-files", + ) +else: + log.warning( + f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only." + ) diff --git a/backend/migrations/README b/backend/migrations/README new file mode 100644 index 0000000000000000000000000000000000000000..f1d93dff9dbd52e0a9fc8ce4d68e0784c07da697 --- /dev/null +++ b/backend/migrations/README @@ -0,0 +1,4 @@ +Generic single-database configuration. + +Create new migrations with +DATABASE_URL= alembic revision --autogenerate -m "a description" diff --git a/backend/migrations/env.py b/backend/migrations/env.py new file mode 100644 index 0000000000000000000000000000000000000000..8046abff3a597ca42604ab5a8557d29b1f8871e5 --- /dev/null +++ b/backend/migrations/env.py @@ -0,0 +1,96 @@ +import os +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from apps.webui.models.auths import Auth +from apps.webui.models.chats import Chat +from apps.webui.models.documents import Document +from apps.webui.models.memories import Memory +from apps.webui.models.models import Model +from apps.webui.models.prompts import Prompt +from apps.webui.models.tags import Tag, ChatIdTag +from apps.webui.models.tools import Tool +from apps.webui.models.users import User +from apps.webui.models.files import File +from apps.webui.models.functions import Function + +from config import DATABASE_URL + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
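+# (disable_existing_loggers=False keeps the application's own loggers active.)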
+if config.config_file_name is not None: + fileConfig(config.config_file_name, disable_existing_loggers=False) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Auth.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +DB_URL = DATABASE_URL + +if DB_URL: + config.set_main_option("sqlalchemy.url", DB_URL.replace("%", "%%")) + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/migrations/script.py.mako b/backend/migrations/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..5f667ccfe0b7732421efa90d85ec2790f3603b04 --- /dev/null +++ b/backend/migrations/script.py.mako @@ -0,0 +1,27 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import apps.webui.internal.db +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/migrations/util.py b/backend/migrations/util.py new file mode 100644 index 0000000000000000000000000000000000000000..401bb94d03425aca522163c7c492405d953c9eea --- /dev/null +++ b/backend/migrations/util.py @@ -0,0 +1,9 @@ +from alembic import op +from sqlalchemy import Inspector + + +def get_existing_tables(): + con = op.get_bind() + inspector = Inspector.from_engine(con) + tables = set(inspector.get_table_names()) + return tables diff --git a/backend/migrations/versions/7e5b5dc7342b_init.py b/backend/migrations/versions/7e5b5dc7342b_init.py new file mode 100644 index 0000000000000000000000000000000000000000..b82627f5bc286da05d2f9f1fdf6005f00f91ed55 --- /dev/null +++ b/backend/migrations/versions/7e5b5dc7342b_init.py @@ -0,0 +1,202 @@ +"""init + +Revision ID: 7e5b5dc7342b +Revises: +Create Date: 2024-06-24 13:15:33.808998 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import apps.webui.internal.db +from migrations.util import get_existing_tables + +# revision identifiers, used by Alembic. +revision: str = "7e5b5dc7342b" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + existing_tables = set(get_existing_tables()) + + # ### commands auto generated by Alembic - please adjust! ### + if "auth" not in existing_tables: + op.create_table( + "auth", + sa.Column("id", sa.String(), nullable=False), + sa.Column("email", sa.String(), nullable=True), + sa.Column("password", sa.Text(), nullable=True), + sa.Column("active", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "chat" not in existing_tables: + op.create_table( + "chat", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("chat", sa.Text(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("share_id", sa.Text(), nullable=True), + sa.Column("archived", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("share_id"), + ) + + if "chatidtag" not in existing_tables: + op.create_table( + "chatidtag", + sa.Column("id", sa.String(), nullable=False), + sa.Column("tag_name", sa.String(), nullable=True), + sa.Column("chat_id", sa.String(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "document" not in existing_tables: + op.create_table( + "document", + sa.Column("collection_name", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("filename", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("collection_name"), + sa.UniqueConstraint("name"), + ) + + if "file" not in 
existing_tables: + op.create_table( + "file", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("filename", sa.Text(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "function" not in existing_tables: + op.create_table( + "function", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("type", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("valves", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=True), + sa.Column("is_global", sa.Boolean(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "memory" not in existing_tables: + op.create_table( + "memory", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "model" not in existing_tables: + op.create_table( + "model", + sa.Column("id", sa.Text(), nullable=False), + sa.Column("user_id", sa.Text(), nullable=True), + sa.Column("base_model_id", sa.Text(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("params", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "prompt" not in existing_tables: + op.create_table( + "prompt", + sa.Column("command", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("command"), + ) + + if "tag" not in existing_tables: + op.create_table( + "tag", + sa.Column("id", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("data", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "tool" not in existing_tables: + op.create_table( + "tool", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("specs", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("valves", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "user" not in existing_tables: + op.create_table( + "user", + sa.Column("id", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("email", sa.String(), 
nullable=True), + sa.Column("role", sa.String(), nullable=True), + sa.Column("profile_image_url", sa.Text(), nullable=True), + sa.Column("last_active_at", sa.BigInteger(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.Column("api_key", sa.String(), nullable=True), + sa.Column("settings", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("info", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("oauth_sub", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("api_key"), + sa.UniqueConstraint("oauth_sub"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("user") + op.drop_table("tool") + op.drop_table("tag") + op.drop_table("prompt") + op.drop_table("model") + op.drop_table("memory") + op.drop_table("function") + op.drop_table("file") + op.drop_table("document") + op.drop_table("chatidtag") + op.drop_table("chat") + op.drop_table("auth") + # ### end Alembic commands ### diff --git a/backend/open_webui/__init__.py b/backend/open_webui/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1defac8247fe72890702f2f36122fd71c8b39f64 --- /dev/null +++ b/backend/open_webui/__init__.py @@ -0,0 +1,60 @@ +import base64 +import os +import random +from pathlib import Path + +import typer +import uvicorn + +app = typer.Typer() + +KEY_FILE = Path.cwd() / ".webui_secret_key" +if (frontend_build_dir := Path(__file__).parent / "frontend").exists(): + os.environ["FRONTEND_BUILD_DIR"] = str(frontend_build_dir) + + +@app.command() +def serve( + host: str = "0.0.0.0", + port: int = 8080, +): + if os.getenv("WEBUI_SECRET_KEY") is None: + typer.echo( + "Loading WEBUI_SECRET_KEY from file, not provided as an environment variable." + ) + if not KEY_FILE.exists(): + typer.echo(f"Generating a new secret key and saving it to {KEY_FILE}") + KEY_FILE.write_bytes(base64.b64encode(random.randbytes(12))) + typer.echo(f"Loading WEBUI_SECRET_KEY from {KEY_FILE}") + os.environ["WEBUI_SECRET_KEY"] = KEY_FILE.read_text() + + if os.getenv("USE_CUDA_DOCKER", "false") == "true": + typer.echo( + "CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries." 
+ ) + LD_LIBRARY_PATH = os.getenv("LD_LIBRARY_PATH", "").split(":") + os.environ["LD_LIBRARY_PATH"] = ":".join( + LD_LIBRARY_PATH + + [ + "/usr/local/lib/python3.11/site-packages/torch/lib", + "/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib", + ] + ) + import main # we need set environment variables before importing main + + uvicorn.run(main.app, host=host, port=port, forwarded_allow_ips="*") + + +@app.command() +def dev( + host: str = "0.0.0.0", + port: int = 8080, + reload: bool = True, +): + uvicorn.run( + "main:app", host=host, port=port, reload=reload, forwarded_allow_ips="*" + ) + + +if __name__ == "__main__": + app() diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b12854a081d8c2795c70e87e30abf7075552b1a --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,79 @@ +fastapi==0.111.0 +uvicorn[standard]==0.22.0 +pydantic==2.8.2 +python-multipart==0.0.9 + +Flask==3.0.3 +Flask-Cors==4.0.1 + +python-socketio==5.11.3 +python-jose==3.3.0 +passlib[bcrypt]==1.7.4 + +requests==2.32.3 +aiohttp==3.9.5 +sqlalchemy==2.0.31 +alembic==1.13.2 +peewee==3.17.6 +peewee-migrate==1.12.2 +psycopg2-binary==2.9.9 +PyMySQL==1.1.1 +bcrypt==4.1.3 +SQLAlchemy +pymongo +redis +boto3==1.34.110 + +argon2-cffi==23.1.0 +APScheduler==3.10.4 + +# AI libraries +openai +anthropic +google-generativeai==0.7.2 +tiktoken + +langchain==0.2.11 +langchain-community==0.2.10 +langchain-chroma==0.1.2 + +fake-useragent==1.5.1 +chromadb==0.5.4 +sentence-transformers==3.0.1 +pypdf==4.2.0 +docx2txt==0.8 +python-pptx==0.6.23 +unstructured==0.15.0 +Markdown==3.6 +pypandoc==1.13 +pandas==2.2.2 +openpyxl==3.1.5 +pyxlsb==1.0.10 +xlrd==2.0.1 +validators==0.28.1 +psutil + +opencv-python-headless==4.10.0.84 +rapidocr-onnxruntime==1.3.24 + +fpdf2==2.7.9 +rank-bm25==0.2.2 + +faster-whisper==1.0.2 + +PyJWT[crypto]==2.8.0 +authlib==1.3.1 + +black==24.4.2 +langfuse==2.39.2 +youtube-transcript-api==0.6.2 +pytube==15.0.0 + +extract_msg +pydub +duckduckgo-search~=6.2.1 + +## Tests +docker~=7.1.0 +pytest~=8.2.2 +pytest-docker~=3.1.1 diff --git a/backend/start.sh b/backend/start.sh new file mode 100755 index 0000000000000000000000000000000000000000..aacc6daf10c5c544a146b6c3a2f5848c7c265a3d --- /dev/null +++ b/backend/start.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +cd "$SCRIPT_DIR" || exit + +KEY_FILE=.webui_secret_key + +PORT="${PORT:-8080}" +HOST="${HOST:-0.0.0.0}" +if test "$WEBUI_SECRET_KEY $WEBUI_JWT_SECRET_KEY" = " "; then + echo "Loading WEBUI_SECRET_KEY from file, not provided as an environment variable." + + if ! [ -e "$KEY_FILE" ]; then + echo "Generating WEBUI_SECRET_KEY" + # Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one. + echo $(head -c 12 /dev/random | base64) > "$KEY_FILE" + fi + + echo "Loading WEBUI_SECRET_KEY from $KEY_FILE" + WEBUI_SECRET_KEY=$(cat "$KEY_FILE") +fi + +if [[ "${USE_OLLAMA_DOCKER,,}" == "true" ]]; then + echo "USE_OLLAMA is set to true, starting ollama serve." + ollama serve & +fi + +if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then + echo "CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries." 
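+    # Add the CUDA libraries bundled with the torch and nvidia-cudnn packages
+    # (under site-packages) to the dynamic linker search path.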
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib" +fi + + +# Check if SPACE_ID is set, if so, configure for space +if [ -n "$SPACE_ID" ]; then + echo "Configuring for HuggingFace Space deployment" + if [ -n "$ADMIN_USER_EMAIL" ] && [ -n "$ADMIN_USER_PASSWORD" ]; then + echo "Admin user configured, creating" + WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' & + webui_pid=$! + echo "Waiting for webui to start..." + while ! curl -s http://localhost:8080/health > /dev/null; do + sleep 1 + done + echo "Creating admin user..." + curl \ + -X POST "http://localhost:8080/api/v1/auths/signup" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ + -d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }" + echo "Shutting down webui..." + kill $webui_pid + fi + + if [ -n "$OAUTH_CLIENT_ID" ]; then + echo "OAuth client ID provided, configuring for OAuth" + export OPENID_PROVIDER_URL=${OPENID_PROVIDER_URL}/.well-known/openid-configuration + export OAUTH_PROVIDER_NAME="Hugging Face" + fi + + export WEBUI_URL=${SPACE_HOST} +fi + +WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' diff --git a/backend/start_windows.bat b/backend/start_windows.bat new file mode 100644 index 0000000000000000000000000000000000000000..b2498f9c2b0588c251d142b0b881440d6f191c5a --- /dev/null +++ b/backend/start_windows.bat @@ -0,0 +1,33 @@ +:: This method is not recommended, and we recommend you use the `start.sh` file with WSL instead. +@echo off +SETLOCAL ENABLEDELAYEDEXPANSION + +:: Get the directory of the current script +SET "SCRIPT_DIR=%~dp0" +cd /d "%SCRIPT_DIR%" || exit /b + +SET "KEY_FILE=.webui_secret_key" +IF "%PORT%"=="" SET PORT=8080 +IF "%HOST%"=="" SET HOST=0.0.0.0 +SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%" +SET "WEBUI_JWT_SECRET_KEY=%WEBUI_JWT_SECRET_KEY%" + +:: Check if WEBUI_SECRET_KEY and WEBUI_JWT_SECRET_KEY are not set +IF "%WEBUI_SECRET_KEY%%WEBUI_JWT_SECRET_KEY%" == " " ( + echo Loading WEBUI_SECRET_KEY from file, not provided as an environment variable. 
+ + IF NOT EXIST "%KEY_FILE%" ( + echo Generating WEBUI_SECRET_KEY + :: Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one + SET /p WEBUI_SECRET_KEY=>%KEY_FILE% + echo WEBUI_SECRET_KEY generated + ) + + echo Loading WEBUI_SECRET_KEY from %KEY_FILE% + SET /p WEBUI_SECRET_KEY=<%KEY_FILE% +) + +:: Execute uvicorn +SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%" +uvicorn main:app --host "%HOST%" --port "%PORT%" --forwarded-allow-ips '*' diff --git a/backend/static/favicon.png b/backend/static/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..2b2074780847581edf9cf2ed0d2e9ebd8ff08c56 Binary files /dev/null and b/backend/static/favicon.png differ diff --git a/backend/static/fonts/NotoSans-Bold.ttf b/backend/static/fonts/NotoSans-Bold.ttf new file mode 100644 index 0000000000000000000000000000000000000000..56310ad1ad635e6ac20478daff20f14f0647e3ed --- /dev/null +++ b/backend/static/fonts/NotoSans-Bold.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf382cad35e731fc4f13b1bf068c5085cd17bee2141014cc94919c140529488d +size 582604 diff --git a/backend/static/fonts/NotoSans-Italic.ttf b/backend/static/fonts/NotoSans-Italic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..1ad4f126bc92b2188effdf3f0db8e4a260ecc5c4 --- /dev/null +++ b/backend/static/fonts/NotoSans-Italic.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:380a500e3dda76d955dadc77053227cc61149814737dc9f7d973d09415ad851f +size 597000 diff --git a/backend/static/fonts/NotoSans-Regular.ttf b/backend/static/fonts/NotoSans-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..defecf9b7931c29b64603ca1eed7eb4f513d42ad --- /dev/null +++ b/backend/static/fonts/NotoSans-Regular.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3be6b371cef19ed6add589bd106444ab74c9793bc812d3159298b73d00ee011c +size 582748 diff --git a/backend/static/fonts/NotoSansJP-Regular.ttf b/backend/static/fonts/NotoSansJP-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..34e480073b8a55f1e9476c669acde48e049bd0b5 --- /dev/null +++ b/backend/static/fonts/NotoSansJP-Regular.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb3df01b4182734d021d79ec5bac17903bb681e926a059c59ed81a373d612241 +size 5732824 diff --git a/backend/static/fonts/NotoSansKR-Regular.ttf b/backend/static/fonts/NotoSansKR-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..c847342886ba5d1e326844ba2d679b9d0bdaa805 --- /dev/null +++ b/backend/static/fonts/NotoSansKR-Regular.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9db318b65ee9c575a43e7efd273dbdd1afef26e467eea3e1073a50e1a6595f6d +size 6192764 diff --git a/backend/static/logo.png b/backend/static/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..519af1db620dbf4de3694660dae7abd7392f0b3c Binary files /dev/null and b/backend/static/logo.png differ diff --git a/backend/static/splash.png b/backend/static/splash.png new file mode 100644 index 0000000000000000000000000000000000000000..389196ca6a364b9e4b7daa0fc13be463b914b251 Binary files /dev/null and b/backend/static/splash.png differ diff --git a/backend/static/user-import.csv b/backend/static/user-import.csv new file mode 100644 index 0000000000000000000000000000000000000000..918a92aad71d708ae13fedb8b91f79c29a5b3e9d --- /dev/null +++ b/backend/static/user-import.csv @@ -0,0 +1 @@ 
+Name,Email,Password,Role diff --git a/backend/test/__init__.py b/backend/test/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/apps/webui/routers/test_auths.py b/backend/test/apps/webui/routers/test_auths.py new file mode 100644 index 0000000000000000000000000000000000000000..3a8695a6935f98e8c3c3ca4f59694fcf6ea3f147 --- /dev/null +++ b/backend/test/apps/webui/routers/test_auths.py @@ -0,0 +1,202 @@ +import pytest + +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestAuths(AbstractPostgresTest): + BASE_PATH = "/api/v1/auths" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.users import Users + from apps.webui.models.auths import Auths + + cls.users = Users + cls.auths = Auths + + def test_get_session_user(self): + with mock_webui_user(): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert response.json() == { + "id": "1", + "name": "John Doe", + "email": "john.doe@openwebui.com", + "role": "user", + "profile_image_url": "/user.png", + } + + def test_update_profile(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("old_password"), + name="John Doe", + profile_image_url="/user.png", + role="user", + ) + + with mock_webui_user(id=user.id): + response = self.fast_api_client.post( + self.create_url("/update/profile"), + json={"name": "John Doe 2", "profile_image_url": "/user2.png"}, + ) + assert response.status_code == 200 + db_user = self.users.get_user_by_id(user.id) + assert db_user.name == "John Doe 2" + assert db_user.profile_image_url == "/user2.png" + + def test_update_password(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("old_password"), + name="John Doe", + profile_image_url="/user.png", + role="user", + ) + + with mock_webui_user(id=user.id): + response = self.fast_api_client.post( + self.create_url("/update/password"), + json={"password": "old_password", "new_password": "new_password"}, + ) + assert response.status_code == 200 + + old_auth = self.auths.authenticate_user( + "john.doe@openwebui.com", "old_password" + ) + assert old_auth is None + new_auth = self.auths.authenticate_user( + "john.doe@openwebui.com", "new_password" + ) + assert new_auth is not None + + def test_signin(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("password"), + name="John Doe", + profile_image_url="/user.png", + role="user", + ) + response = self.fast_api_client.post( + self.create_url("/signin"), + json={"email": "john.doe@openwebui.com", "password": "password"}, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] == user.id + assert data["name"] == "John Doe" + assert data["email"] == "john.doe@openwebui.com" + assert data["role"] == "user" + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_signup(self): + response = self.fast_api_client.post( + self.create_url("/signup"), + json={ + "name": "John Doe", + "email": "john.doe@openwebui.com", + "password": "password", + }, + ) + assert 
response.status_code == 200 + data = response.json() + assert data["id"] is not None and len(data["id"]) > 0 + assert data["name"] == "John Doe" + assert data["email"] == "john.doe@openwebui.com" + assert data["role"] in ["admin", "user", "pending"] + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_add_user(self): + with mock_webui_user(): + response = self.fast_api_client.post( + self.create_url("/add"), + json={ + "name": "John Doe 2", + "email": "john.doe2@openwebui.com", + "password": "password2", + "role": "admin", + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] is not None and len(data["id"]) > 0 + assert data["name"] == "John Doe 2" + assert data["email"] == "john.doe2@openwebui.com" + assert data["role"] == "admin" + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_get_admin_details(self): + self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + with mock_webui_user(): + response = self.fast_api_client.get(self.create_url("/admin/details")) + + assert response.status_code == 200 + assert response.json() == { + "name": "John Doe", + "email": "john.doe@openwebui.com", + } + + def test_create_api_key_(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + with mock_webui_user(id=user.id): + response = self.fast_api_client.post(self.create_url("/api_key")) + assert response.status_code == 200 + data = response.json() + assert data["api_key"] is not None + assert len(data["api_key"]) > 0 + + def test_delete_api_key(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + self.users.update_user_api_key_by_id(user.id, "abc") + with mock_webui_user(id=user.id): + response = self.fast_api_client.delete(self.create_url("/api_key")) + assert response.status_code == 200 + assert response.json() == True + db_user = self.users.get_user_by_id(user.id) + assert db_user.api_key is None + + def test_get_api_key(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + self.users.update_user_api_key_by_id(user.id, "abc") + with mock_webui_user(id=user.id): + response = self.fast_api_client.get(self.create_url("/api_key")) + assert response.status_code == 200 + assert response.json() == {"api_key": "abc"} diff --git a/backend/test/apps/webui/routers/test_chats.py b/backend/test/apps/webui/routers/test_chats.py new file mode 100644 index 0000000000000000000000000000000000000000..f4661b62573a5891daa647b423b3bfdc540ac031 --- /dev/null +++ b/backend/test/apps/webui/routers/test_chats.py @@ -0,0 +1,238 @@ +import uuid + +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestChats(AbstractPostgresTest): + + BASE_PATH = "/api/v1/chats" + + def setup_class(cls): + super().setup_class() + + def setup_method(self): + super().setup_method() + from apps.webui.models.chats import ChatForm + from apps.webui.models.chats 
import Chats + + self.chats = Chats + self.chats.insert_new_chat( + "2", + ChatForm( + **{ + "chat": { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", "tag2"], + "history": {"currentId": "1", "messages": []}, + } + } + ), + ) + + def test_get_session_user_chat_list(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_delete_all_user_chats(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url("/")) + assert response.status_code == 200 + assert len(self.chats.get_chats()) == 0 + + def test_get_user_chat_list_by_user_id(self): + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("/list/user/2")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_create_new_chat(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/new"), + json={ + "chat": { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag1", "tag2"], + } + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["archived"] is False + assert data["chat"] == { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag1", "tag2"], + } + assert data["user_id"] == "2" + assert data["id"] is not None + assert data["share_id"] is None + assert data["title"] == "New Chat" + assert data["updated_at"] is not None + assert data["created_at"] is not None + assert len(self.chats.get_chats()) == 2 + + def test_get_user_chats(self): + self.test_get_session_user_chat_list() + + def test_get_user_archived_chats(self): + self.chats.archive_all_chats_by_user_id("2") + from apps.webui.internal.db import Session + + Session.commit() + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/all/archived")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_get_all_user_chats_in_db(self): + with mock_webui_user(id="4"): + response = self.fast_api_client.get(self.create_url("/all/db")) + assert response.status_code == 200 + assert len(response.json()) == 1 + + def test_get_archived_session_user_chat_list(self): + self.test_get_user_archived_chats() + + def test_archive_all_chats(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.post(self.create_url("/archive/all")) + assert response.status_code == 200 + assert len(self.chats.get_archived_chats_by_user_id("2")) == 1 + + def test_get_shared_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + self.chats.update_chat_share_id_by_id(chat_id, chat_id) + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/share/{chat_id}")) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", 
"tag2"], + "history": {"currentId": "1", "messages": []}, + } + assert data["id"] == chat_id + assert data["share_id"] == chat_id + assert data["title"] == "New Chat" + + def test_get_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}")) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", "tag2"], + "history": {"currentId": "1", "messages": []}, + } + assert data["share_id"] is None + assert data["title"] == "New Chat" + assert data["user_id"] == "2" + + def test_update_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url(f"/{chat_id}"), + json={ + "chat": { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag2", "tag4"], + "title": "Just another title", + } + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat2", + "title": "Just another title", + "description": "chat2 description", + "tags": ["tag2", "tag4"], + "history": {"currentId": "1", "messages": []}, + } + assert data["share_id"] is None + assert data["title"] == "Just another title" + assert data["user_id"] == "2" + + def test_delete_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url(f"/{chat_id}")) + assert response.status_code == 200 + assert response.json() is True + + def test_clone_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}/clone")) + + assert response.status_code == 200 + data = response.json() + assert data["id"] != chat_id + assert data["chat"] == { + "branchPointMessageId": "1", + "description": "chat1 description", + "history": {"currentId": "1", "messages": []}, + "name": "chat1", + "originalChatId": chat_id, + "tags": ["tag1", "tag2"], + "title": "Clone of New Chat", + } + assert data["share_id"] is None + assert data["title"] == "Clone of New Chat" + assert data["user_id"] == "2" + + def test_archive_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}/archive")) + assert response.status_code == 200 + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.archived is True + + def test_share_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.post(self.create_url(f"/{chat_id}/share")) + assert response.status_code == 200 + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.share_id is not None + + def test_delete_shared_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + share_id = str(uuid.uuid4()) + self.chats.update_chat_share_id_by_id(chat_id, share_id) + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url(f"/{chat_id}/share")) + assert response.status_code + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.share_id is None diff --git a/backend/test/apps/webui/routers/test_documents.py b/backend/test/apps/webui/routers/test_documents.py new file mode 100644 index 
0000000000000000000000000000000000000000..14ca339fd0b3332c9460865587cb2ad99e671cd6 --- /dev/null +++ b/backend/test/apps/webui/routers/test_documents.py @@ -0,0 +1,106 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestDocuments(AbstractPostgresTest): + + BASE_PATH = "/api/v1/documents" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.documents import Documents + + cls.documents = Documents + + def test_documents(self): + # Empty database + assert len(self.documents.get_docs()) == 0 + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 + + # Create a new document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "name": "doc_name", + "title": "doc title", + "collection_name": "custom collection", + "filename": "doc_name.pdf", + "content": "", + }, + ) + assert response.status_code == 200 + assert response.json()["name"] == "doc_name" + assert len(self.documents.get_docs()) == 1 + + # Get the document + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/doc?name=doc_name")) + assert response.status_code == 200 + data = response.json() + assert data["collection_name"] == "custom collection" + assert data["name"] == "doc_name" + assert data["title"] == "doc title" + assert data["filename"] == "doc_name.pdf" + assert data["content"] == {} + + # Create another document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "name": "doc_name 2", + "title": "doc title 2", + "collection_name": "custom collection 2", + "filename": "doc_name2.pdf", + "content": "", + }, + ) + assert response.status_code == 200 + assert response.json()["name"] == "doc_name 2" + assert len(self.documents.get_docs()) == 2 + + # Get all documents + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 2 + + # Update the first document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/doc/update?name=doc_name"), + json={"name": "doc_name rework", "title": "updated title"}, + ) + assert response.status_code == 200 + data = response.json() + assert data["name"] == "doc_name rework" + assert data["title"] == "updated title" + + # Tag the first document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/doc/tags"), + json={ + "name": "doc_name rework", + "tags": [{"name": "testing-tag"}, {"name": "another-tag"}], + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["name"] == "doc_name rework" + assert data["content"] == { + "tags": [{"name": "testing-tag"}, {"name": "another-tag"}] + } + assert len(self.documents.get_docs()) == 2 + + # Delete the first document + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/doc/delete?name=doc_name rework") + ) + assert response.status_code == 200 + assert len(self.documents.get_docs()) == 1 diff --git a/backend/test/apps/webui/routers/test_models.py b/backend/test/apps/webui/routers/test_models.py new file mode 100644 index 0000000000000000000000000000000000000000..410c4516a23f7af14d7425b504966edc873271ac --- /dev/null +++ 
b/backend/test/apps/webui/routers/test_models.py @@ -0,0 +1,62 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestModels(AbstractPostgresTest): + + BASE_PATH = "/api/v1/models" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.models import Model + + cls.models = Model + + def test_models(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 + + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/add"), + json={ + "id": "my-model", + "base_model_id": "base-model-id", + "name": "Hello World", + "meta": { + "profile_image_url": "/static/favicon.png", + "description": "description", + "capabilities": None, + "model_config": {}, + }, + "params": {}, + }, + ) + assert response.status_code == 200 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 1 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get( + self.create_url(query_params={"id": "my-model"}) + ) + assert response.status_code == 200 + data = response.json()[0] + assert data["id"] == "my-model" + assert data["name"] == "Hello World" + + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/delete?id=my-model") + ) + assert response.status_code == 200 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 diff --git a/backend/test/apps/webui/routers/test_prompts.py b/backend/test/apps/webui/routers/test_prompts.py new file mode 100644 index 0000000000000000000000000000000000000000..9f47be9923c11206612b473bb0334c9e80e2f2d3 --- /dev/null +++ b/backend/test/apps/webui/routers/test_prompts.py @@ -0,0 +1,92 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestPrompts(AbstractPostgresTest): + + BASE_PATH = "/api/v1/prompts" + + def test_prompts(self): + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 + + # Create a two new prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "command": "/my-command", + "title": "Hello World", + "content": "description", + }, + ) + assert response.status_code == 200 + with mock_webui_user(id="3"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "command": "/my-command2", + "title": "Hello World 2", + "content": "description 2", + }, + ) + assert response.status_code == 200 + + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 2 + + # Get prompt by command + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/command/my-command")) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command" + assert data["title"] == "Hello World" + assert data["content"] == "description" + assert data["user_id"] == "2" + + # Update prompt + with mock_webui_user(id="2"): + 
response = self.fast_api_client.post( + self.create_url("/command/my-command2/update"), + json={ + "command": "irrelevant for request", + "title": "Hello World Updated", + "content": "description Updated", + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command2" + assert data["title"] == "Hello World Updated" + assert data["content"] == "description Updated" + assert data["user_id"] == "3" + + # Get prompt by command + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/command/my-command2")) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command2" + assert data["title"] == "Hello World Updated" + assert data["content"] == "description Updated" + assert data["user_id"] == "3" + + # Delete prompt + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/command/my-command/delete") + ) + assert response.status_code == 200 + + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 1 diff --git a/backend/test/apps/webui/routers/test_users.py b/backend/test/apps/webui/routers/test_users.py new file mode 100644 index 0000000000000000000000000000000000000000..9736b4d32a6df5b8cf291c420d1e4b4c4fc61f96 --- /dev/null +++ b/backend/test/apps/webui/routers/test_users.py @@ -0,0 +1,168 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +def _get_user_by_id(data, param): + return next((item for item in data if item["id"] == param), None) + + +def _assert_user(data, id, **kwargs): + user = _get_user_by_id(data, id) + assert user is not None + comparison_data = { + "name": f"user {id}", + "email": f"user{id}@openwebui.com", + "profile_image_url": f"/user{id}.png", + "role": "user", + **kwargs, + } + for key, value in comparison_data.items(): + assert user[key] == value + + +class TestUsers(AbstractPostgresTest): + + BASE_PATH = "/api/v1/users" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.users import Users + + cls.users = Users + + def setup_method(self): + super().setup_method() + self.users.insert_new_user( + id="1", + name="user 1", + email="user1@openwebui.com", + profile_image_url="/user1.png", + role="user", + ) + self.users.insert_new_user( + id="2", + name="user 2", + email="user2@openwebui.com", + profile_image_url="/user2.png", + role="user", + ) + + def test_users(self): + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user(data, "2") + + # update role + with mock_webui_user(id="3"): + response = self.fast_api_client.post( + self.create_url("/update/role"), json={"id": "2", "role": "admin"} + ) + assert response.status_code == 200 + _assert_user([response.json()], "2", role="admin") + + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user(data, "2", role="admin") + + # Get (empty) user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/user/settings")) + assert 
response.status_code == 200 + assert response.json() is None + + # Update user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/user/settings/update"), + json={ + "ui": {"attr1": "value1", "attr2": "value2"}, + "model_config": {"attr3": "value3", "attr4": "value4"}, + }, + ) + assert response.status_code == 200 + + # Get user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/user/settings")) + assert response.status_code == 200 + assert response.json() == { + "ui": {"attr1": "value1", "attr2": "value2"}, + "model_config": {"attr3": "value3", "attr4": "value4"}, + } + + # Get (empty) user info + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/user/info")) + assert response.status_code == 200 + assert response.json() is None + + # Update user info + with mock_webui_user(id="1"): + response = self.fast_api_client.post( + self.create_url("/user/info/update"), + json={"attr1": "value1", "attr2": "value2"}, + ) + assert response.status_code == 200 + + # Get user info + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/user/info")) + assert response.status_code == 200 + assert response.json() == {"attr1": "value1", "attr2": "value2"} + + # Get user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/2")) + assert response.status_code == 200 + assert response.json() == {"name": "user 2", "profile_image_url": "/user2.png"} + + # Update user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.post( + self.create_url("/2/update"), + json={ + "name": "user 2 updated", + "email": "user2-updated@openwebui.com", + "profile_image_url": "/user2-updated.png", + }, + ) + assert response.status_code == 200 + + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user( + data, + "2", + role="admin", + name="user 2 updated", + email="user2-updated@openwebui.com", + profile_image_url="/user2-updated.png", + ) + + # Delete user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.delete(self.create_url("/2")) + assert response.status_code == 200 + + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 1 + data = response.json() + _assert_user(data, "1") diff --git a/backend/test/util/abstract_integration_test.py b/backend/test/util/abstract_integration_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8535221a8575ec887b73492cba6d54f7f1b46c08 --- /dev/null +++ b/backend/test/util/abstract_integration_test.py @@ -0,0 +1,161 @@ +import logging +import os +import time + +import docker +import pytest +from docker import DockerClient +from pytest_docker.plugin import get_docker_ip +from fastapi.testclient import TestClient +from sqlalchemy import text, create_engine + + +log = logging.getLogger(__name__) + + +def get_fast_api_client(): + from main import app + + with TestClient(app) as c: + return c + + +class AbstractIntegrationTest: + BASE_PATH = None + + def create_url(self, path="", query_params=None): + if self.BASE_PATH is None: + raise Exception("BASE_PATH is not set") + parts = self.BASE_PATH.split("/") + parts = [part.strip() for 
part in parts if part.strip() != ""] + path_parts = path.split("/") + path_parts = [part.strip() for part in path_parts if part.strip() != ""] + query_parts = "" + if query_params: + query_parts = "&".join( + [f"{key}={value}" for key, value in query_params.items()] + ) + query_parts = f"?{query_parts}" + return "/".join(parts + path_parts) + query_parts + + @classmethod + def setup_class(cls): + pass + + def setup_method(self): + pass + + @classmethod + def teardown_class(cls): + pass + + def teardown_method(self): + pass + + +class AbstractPostgresTest(AbstractIntegrationTest): + DOCKER_CONTAINER_NAME = "postgres-test-container-will-get-deleted" + docker_client: DockerClient + + @classmethod + def _create_db_url(cls, env_vars_postgres: dict) -> str: + host = get_docker_ip() + user = env_vars_postgres["POSTGRES_USER"] + pw = env_vars_postgres["POSTGRES_PASSWORD"] + port = 8081 + db = env_vars_postgres["POSTGRES_DB"] + return f"postgresql://{user}:{pw}@{host}:{port}/{db}" + + @classmethod + def setup_class(cls): + super().setup_class() + try: + env_vars_postgres = { + "POSTGRES_USER": "user", + "POSTGRES_PASSWORD": "example", + "POSTGRES_DB": "openwebui", + } + cls.docker_client = docker.from_env() + cls.docker_client.containers.run( + "postgres:16.2", + detach=True, + environment=env_vars_postgres, + name=cls.DOCKER_CONTAINER_NAME, + ports={5432: ("0.0.0.0", 8081)}, + command="postgres -c log_statement=all", + ) + time.sleep(0.5) + + database_url = cls._create_db_url(env_vars_postgres) + os.environ["DATABASE_URL"] = database_url + retries = 10 + db = None + while retries > 0: + try: + from config import BACKEND_DIR + + db = create_engine(database_url, pool_pre_ping=True) + db = db.connect() + log.info("postgres is ready!") + break + except Exception as e: + log.warning(e) + time.sleep(3) + retries -= 1 + + if db: + # import must be after setting env! 
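The `# import must be after setting env!` comment above is load-bearing: the backend's `config` module resolves `DATABASE_URL` when it is first imported, so the fixture has to export the variable before anything from the application package is loaded. A minimal sketch of the same pattern; the URL mirrors `_create_db_url()` above and the import paths are assumptions for illustration, not part of the suite:

```python
import os

# Point the app at the throwaway Postgres container *before* importing it;
# this URL mirrors _create_db_url() above and is illustrative only.
os.environ["DATABASE_URL"] = "postgresql://user:example@127.0.0.1:8081/openwebui"


def build_test_client():
    # Deferred imports: the app reads DATABASE_URL at import time, so importing
    # it any earlier would silently bind the tests to the default database.
    from fastapi.testclient import TestClient
    from main import app  # resolvable once backend/ is on sys.path, as in get_fast_api_client()

    return TestClient(app)
```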
+ cls.fast_api_client = get_fast_api_client() + db.close() + else: + raise Exception("Could not connect to Postgres") + except Exception as ex: + log.error(ex) + cls.teardown_class() + pytest.fail(f"Could not setup test environment: {ex}") + + def _check_db_connection(self): + from apps.webui.internal.db import Session + + retries = 10 + while retries > 0: + try: + Session.execute(text("SELECT 1")) + Session.commit() + break + except Exception as e: + Session.rollback() + log.warning(e) + time.sleep(3) + retries -= 1 + + def setup_method(self): + super().setup_method() + self._check_db_connection() + + @classmethod + def teardown_class(cls) -> None: + super().teardown_class() + cls.docker_client.containers.get(cls.DOCKER_CONTAINER_NAME).remove(force=True) + + def teardown_method(self): + from apps.webui.internal.db import Session + + # rollback everything not yet committed + Session.commit() + + # truncate all tables + tables = [ + "auth", + "chat", + "chatidtag", + "document", + "memory", + "model", + "prompt", + "tag", + '"user"', + ] + for table in tables: + Session.execute(text(f"TRUNCATE TABLE {table}")) + Session.commit() diff --git a/backend/test/util/mock_user.py b/backend/test/util/mock_user.py new file mode 100644 index 0000000000000000000000000000000000000000..8d0300d3f9a8ec1f9daf501afb9a0dbb9a7cf561 --- /dev/null +++ b/backend/test/util/mock_user.py @@ -0,0 +1,45 @@ +from contextlib import contextmanager + +from fastapi import FastAPI + + +@contextmanager +def mock_webui_user(**kwargs): + from apps.webui.main import app + + with mock_user(app, **kwargs): + yield + + +@contextmanager +def mock_user(app: FastAPI, **kwargs): + from utils.utils import ( + get_current_user, + get_verified_user, + get_admin_user, + get_current_user_by_api_key, + ) + from apps.webui.models.users import User + + def create_user(): + user_parameters = { + "id": "1", + "name": "John Doe", + "email": "john.doe@openwebui.com", + "role": "user", + "profile_image_url": "/user.png", + "last_active_at": 1627351200, + "updated_at": 1627351200, + "created_at": 162735120, + **kwargs, + } + return User(**user_parameters) + + app.dependency_overrides = { + get_current_user: create_user, + get_verified_user: create_user, + get_admin_user: create_user, + get_current_user_by_api_key: create_user, + } + yield + app.dependency_overrides = {} diff --git a/backend/utils/logo.png b/backend/utils/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..519af1db620dbf4de3694660dae7abd7392f0b3c Binary files /dev/null and b/backend/utils/logo.png differ diff --git a/backend/utils/misc.py b/backend/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..f44a7ce7ac51bdd8d3830ebbf6ded341faf426a7 --- /dev/null +++ b/backend/utils/misc.py @@ -0,0 +1,312 @@ +from pathlib import Path +import hashlib +import json +import re +from datetime import timedelta +from typing import Optional, List, Tuple +import uuid +import time + + +def get_last_user_message_item(messages: List[dict]) -> str: + for message in reversed(messages): + if message["role"] == "user": + return message + return None + + +def get_last_user_message(messages: List[dict]) -> str: + message = get_last_user_message_item(messages) + + if message is not None: + if isinstance(message["content"], list): + for item in message["content"]: + if item["type"] == "text": + return item["text"] + return message["content"] + return None + + +def get_last_assistant_message(messages: List[dict]) -> str: + for message in reversed(messages): 
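The `mock_user` helper above works by swapping every auth dependency in FastAPI's `app.dependency_overrides` mapping for a factory that returns a canned user, then restoring the mapping on exit. A self-contained sketch of the same idea, using a toy app and dependency rather than the project's:

```python
from contextlib import contextmanager

from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient

app = FastAPI()


def get_current_user():  # stand-in for the real auth dependency
    raise RuntimeError("real auth is not available in tests")


@app.get("/me")
def me(user: dict = Depends(get_current_user)):
    return user


@contextmanager
def mock_user(**kwargs):
    # Same trick as test/util/mock_user.py: replace the dependency, yield, restore.
    app.dependency_overrides[get_current_user] = lambda: {"id": "1", "role": "user", **kwargs}
    try:
        yield
    finally:
        app.dependency_overrides = {}


with mock_user(id="2", role="admin"):
    assert TestClient(app).get("/me").json()["id"] == "2"
```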
+ if message["role"] == "assistant": + if isinstance(message["content"], list): + for item in message["content"]: + if item["type"] == "text": + return item["text"] + return message["content"] + return None + + +def get_system_message(messages: List[dict]) -> dict: + for message in messages: + if message["role"] == "system": + return message + return None + + +def remove_system_message(messages: List[dict]) -> List[dict]: + return [message for message in messages if message["role"] != "system"] + + +def pop_system_message(messages: List[dict]) -> Tuple[dict, List[dict]]: + return get_system_message(messages), remove_system_message(messages) + + +def prepend_to_first_user_message_content( + content: str, messages: List[dict] +) -> List[dict]: + for message in messages: + if message["role"] == "user": + if isinstance(message["content"], list): + for item in message["content"]: + if item["type"] == "text": + item["text"] = f"{content}\n{item['text']}" + else: + message["content"] = f"{content}\n{message['content']}" + break + return messages + + +def add_or_update_system_message(content: str, messages: List[dict]): + """ + Adds a new system message at the beginning of the messages list + or updates the existing system message at the beginning. + + :param msg: The message to be added or appended. + :param messages: The list of message dictionaries. + :return: The updated list of message dictionaries. + """ + + if messages and messages[0].get("role") == "system": + messages[0]["content"] += f"{content}\n{messages[0]['content']}" + else: + # Insert at the beginning + messages.insert(0, {"role": "system", "content": content}) + + return messages + + +def stream_message_template(model: str, message: str): + return { + "id": f"{model}-{str(uuid.uuid4())}", + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": model, + "choices": [ + { + "index": 0, + "delta": {"content": message}, + "logprobs": None, + "finish_reason": None, + } + ], + } + + +def get_gravatar_url(email): + # Trim leading and trailing whitespace from + # an email address and force all characters + # to lower case + address = str(email).strip().lower() + + # Create a SHA256 hash of the final string + hash_object = hashlib.sha256(address.encode()) + hash_hex = hash_object.hexdigest() + + # Grab the actual image URL + return f"https://www.gravatar.com/avatar/{hash_hex}?d=mp" + + +def calculate_sha256(file): + sha256 = hashlib.sha256() + # Read the file in chunks to efficiently handle large files + for chunk in iter(lambda: file.read(8192), b""): + sha256.update(chunk) + return sha256.hexdigest() + + +def calculate_sha256_string(string): + # Create a new SHA-256 hash object + sha256_hash = hashlib.sha256() + # Update the hash object with the bytes of the input string + sha256_hash.update(string.encode("utf-8")) + # Get the hexadecimal representation of the hash + hashed_string = sha256_hash.hexdigest() + return hashed_string + + +def validate_email_format(email: str) -> bool: + if email.endswith("@localhost"): + return True + + return bool(re.match(r"[^@]+@[^@]+\.[^@]+", email)) + + +def sanitize_filename(file_name): + # Convert to lowercase + lower_case_file_name = file_name.lower() + + # Remove special characters using regular expression + sanitized_file_name = re.sub(r"[^\w\s]", "", lower_case_file_name) + + # Replace spaces with dashes + final_file_name = re.sub(r"\s+", "-", sanitized_file_name) + + return final_file_name + + +def extract_folders_after_data_docs(path): + # Convert the path to a Path object 
if it's not already + path = Path(path) + + # Extract parts of the path + parts = path.parts + + # Find the index of '/data/docs' in the path + try: + index_data_docs = parts.index("data") + 1 + index_docs = parts.index("docs", index_data_docs) + 1 + except ValueError: + return [] + + # Exclude the filename and accumulate folder names + tags = [] + + folders = parts[index_docs:-1] + for idx, part in enumerate(folders): + tags.append("/".join(folders[: idx + 1])) + + return tags + + +def parse_duration(duration: str) -> Optional[timedelta]: + if duration == "-1" or duration == "0": + return None + + # Regular expression to find number and unit pairs + pattern = r"(-?\d+(\.\d+)?)(ms|s|m|h|d|w)" + matches = re.findall(pattern, duration) + + if not matches: + raise ValueError("Invalid duration string") + + total_duration = timedelta() + + for number, _, unit in matches: + number = float(number) + if unit == "ms": + total_duration += timedelta(milliseconds=number) + elif unit == "s": + total_duration += timedelta(seconds=number) + elif unit == "m": + total_duration += timedelta(minutes=number) + elif unit == "h": + total_duration += timedelta(hours=number) + elif unit == "d": + total_duration += timedelta(days=number) + elif unit == "w": + total_duration += timedelta(weeks=number) + + return total_duration + + +def parse_ollama_modelfile(model_text): + parameters_meta = { + "mirostat": int, + "mirostat_eta": float, + "mirostat_tau": float, + "num_ctx": int, + "repeat_last_n": int, + "repeat_penalty": float, + "temperature": float, + "seed": int, + "tfs_z": float, + "num_predict": int, + "top_k": int, + "top_p": float, + "num_keep": int, + "typical_p": float, + "presence_penalty": float, + "frequency_penalty": float, + "penalize_newline": bool, + "numa": bool, + "num_batch": int, + "num_gpu": int, + "main_gpu": int, + "low_vram": bool, + "f16_kv": bool, + "vocab_only": bool, + "use_mmap": bool, + "use_mlock": bool, + "num_thread": int, + } + + data = {"base_model_id": None, "params": {}} + + # Parse base model + base_model_match = re.search( + r"^FROM\s+(\w+)", model_text, re.MULTILINE | re.IGNORECASE + ) + if base_model_match: + data["base_model_id"] = base_model_match.group(1) + + # Parse template + template_match = re.search( + r'TEMPLATE\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE + ) + if template_match: + data["params"] = {"template": template_match.group(1).strip()} + + # Parse stops + stops = re.findall(r'PARAMETER stop "(.*?)"', model_text, re.IGNORECASE) + if stops: + data["params"]["stop"] = stops + + # Parse other parameters from the provided list + for param, param_type in parameters_meta.items(): + param_match = re.search(rf"PARAMETER {param} (.+)", model_text, re.IGNORECASE) + if param_match: + value = param_match.group(1) + + try: + if param_type == int: + value = int(value) + elif param_type == float: + value = float(value) + elif param_type == bool: + value = value.lower() == "true" + except Exception as e: + print(e) + continue + + data["params"][param] = value + + # Parse adapter + adapter_match = re.search(r"ADAPTER (.+)", model_text, re.IGNORECASE) + if adapter_match: + data["params"]["adapter"] = adapter_match.group(1) + + # Parse system description + system_desc_match = re.search( + r'SYSTEM\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE + ) + system_desc_match_single = re.search( + r"SYSTEM\s+([^\n]+)", model_text, re.IGNORECASE + ) + + if system_desc_match: + data["params"]["system"] = system_desc_match.group(1).strip() + elif system_desc_match_single: 
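`parse_duration`, defined a little further up, converts compact duration strings into `timedelta` objects and maps `"-1"` and `"0"` to `None` ("never expire"). A few worked examples; the import path again assumes `backend/` is on `sys.path`:

```python
from datetime import timedelta

from utils.misc import parse_duration  # assumes backend/ is on sys.path

assert parse_duration("-1") is None  # never expire
assert parse_duration("30m") == timedelta(minutes=30)
assert parse_duration("1d12h") == timedelta(days=1, hours=12)
assert parse_duration("1.5h") == timedelta(hours=1, minutes=30)
```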
+ data["params"]["system"] = system_desc_match_single.group(1).strip() + + # Parse messages + messages = [] + message_matches = re.findall(r"MESSAGE (\w+) (.+)", model_text, re.IGNORECASE) + for role, content in message_matches: + messages.append({"role": role, "content": content}) + + if messages: + data["params"]["messages"] = messages + + return data diff --git a/backend/utils/task.py b/backend/utils/task.py new file mode 100644 index 0000000000000000000000000000000000000000..053a526a801ccf8a07bafbdb5ad7e5f1a222e808 --- /dev/null +++ b/backend/utils/task.py @@ -0,0 +1,127 @@ +import re +import math + +from datetime import datetime +from typing import Optional + + +def prompt_template( + template: str, user_name: str = None, user_location: str = None +) -> str: + # Get the current date + current_date = datetime.now() + + # Format the date to YYYY-MM-DD + formatted_date = current_date.strftime("%Y-%m-%d") + formatted_time = current_date.strftime("%I:%M:%S %p") + + template = template.replace("{{CURRENT_DATE}}", formatted_date) + template = template.replace("{{CURRENT_TIME}}", formatted_time) + template = template.replace( + "{{CURRENT_DATETIME}}", f"{formatted_date} {formatted_time}" + ) + + if user_name: + # Replace {{USER_NAME}} in the template with the user's name + template = template.replace("{{USER_NAME}}", user_name) + else: + # Replace {{USER_NAME}} in the template with "Unknown" + template = template.replace("{{USER_NAME}}", "Unknown") + + if user_location: + # Replace {{USER_LOCATION}} in the template with the current location + template = template.replace("{{USER_LOCATION}}", user_location) + else: + # Replace {{USER_LOCATION}} in the template with "Unknown" + template = template.replace("{{USER_LOCATION}}", "Unknown") + + return template + + +def title_generation_template( + template: str, prompt: str, user: Optional[dict] = None +) -> str: + def replacement_function(match): + full_match = match.group(0) + start_length = match.group(1) + end_length = match.group(2) + middle_length = match.group(3) + + if full_match == "{{prompt}}": + return prompt + elif start_length is not None: + return prompt[: int(start_length)] + elif end_length is not None: + return prompt[-int(end_length) :] + elif middle_length is not None: + middle_length = int(middle_length) + if len(prompt) <= middle_length: + return prompt + start = prompt[: math.ceil(middle_length / 2)] + end = prompt[-math.floor(middle_length / 2) :] + return f"{start}...{end}" + return "" + + template = re.sub( + r"{{prompt}}|{{prompt:start:(\d+)}}|{{prompt:end:(\d+)}}|{{prompt:middletruncate:(\d+)}}", + replacement_function, + template, + ) + + template = prompt_template( + template, + **( + {"user_name": user.get("name"), "user_location": user.get("location")} + if user + else {} + ), + ) + + return template + + +def search_query_generation_template( + template: str, prompt: str, user: Optional[dict] = None +) -> str: + + def replacement_function(match): + full_match = match.group(0) + start_length = match.group(1) + end_length = match.group(2) + middle_length = match.group(3) + + if full_match == "{{prompt}}": + return prompt + elif start_length is not None: + return prompt[: int(start_length)] + elif end_length is not None: + return prompt[-int(end_length) :] + elif middle_length is not None: + middle_length = int(middle_length) + if len(prompt) <= middle_length: + return prompt + start = prompt[: math.ceil(middle_length / 2)] + end = prompt[-math.floor(middle_length / 2) :] + return f"{start}...{end}" + return "" + + 
template = re.sub( + r"{{prompt}}|{{prompt:start:(\d+)}}|{{prompt:end:(\d+)}}|{{prompt:middletruncate:(\d+)}}", + replacement_function, + template, + ) + + template = prompt_template( + template, + **( + {"user_name": user.get("name"), "user_location": user.get("location")} + if user + else {} + ), + ) + return template + + +def tools_function_calling_generation_template(template: str, tools_specs: str) -> str: + template = template.replace("{{TOOLS}}", tools_specs) + return template diff --git a/backend/utils/tools.py b/backend/utils/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..3e5d82fd6d15255e1f89faab2ec91b037eb88116 --- /dev/null +++ b/backend/utils/tools.py @@ -0,0 +1,79 @@ +import inspect +from typing import get_type_hints, List, Dict, Any + + +def doc_to_dict(docstring): + lines = docstring.split("\n") + description = lines[1].strip() + param_dict = {} + + for line in lines: + if ":param" in line: + line = line.replace(":param", "").strip() + param, desc = line.split(":", 1) + param_dict[param.strip()] = desc.strip() + ret_dict = {"description": description, "params": param_dict} + return ret_dict + + +def get_tools_specs(tools) -> List[dict]: + function_list = [ + {"name": func, "function": getattr(tools, func)} + for func in dir(tools) + if callable(getattr(tools, func)) + and not func.startswith("__") + and not inspect.isclass(getattr(tools, func)) + ] + + specs = [] + for function_item in function_list: + function_name = function_item["name"] + function = function_item["function"] + + function_doc = doc_to_dict(function.__doc__ or function_name) + specs.append( + { + "name": function_name, + # TODO: multi-line desc? + "description": function_doc.get("description", function_name), + "parameters": { + "type": "object", + "properties": { + param_name: { + "type": param_annotation.__name__.lower(), + **( + { + "enum": ( + str(param_annotation.__args__) + if hasattr(param_annotation, "__args__") + else None + ) + } + if hasattr(param_annotation, "__args__") + else {} + ), + "description": function_doc.get("params", {}).get( + param_name, param_name + ), + } + for param_name, param_annotation in get_type_hints( + function + ).items() + if param_name != "return" + and not ( + param_name.startswith("__") and param_name.endswith("__") + ) + }, + "required": [ + name + for name, param in inspect.signature( + function + ).parameters.items() + if param.default is param.empty + and not (name.startswith("__") and name.endswith("__")) + ], + }, + } + ) + + return specs diff --git a/backend/utils/utils.py b/backend/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fbc539af5c2de74edbb58c4273c211363b51ffdb --- /dev/null +++ b/backend/utils/utils.py @@ -0,0 +1,145 @@ +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from fastapi import HTTPException, status, Depends, Request +from sqlalchemy.orm import Session + +from apps.webui.models.users import Users + +from pydantic import BaseModel +from typing import Union, Optional +from constants import ERROR_MESSAGES +from passlib.context import CryptContext +from datetime import datetime, timedelta +import requests +import jwt +import uuid +import logging +import config + +logging.getLogger("passlib").setLevel(logging.ERROR) + + +SESSION_SECRET = config.WEBUI_SECRET_KEY +ALGORITHM = "HS256" + +############## +# Auth Utils +############## + +bearer_security = HTTPBearer(auto_error=False) +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +def 
verify_password(plain_password, hashed_password): + return ( + pwd_context.verify(plain_password, hashed_password) if hashed_password else None + ) + + +def get_password_hash(password): + return pwd_context.hash(password) + + +def create_token(data: dict, expires_delta: Union[timedelta, None] = None) -> str: + payload = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + payload.update({"exp": expire}) + + encoded_jwt = jwt.encode(payload, SESSION_SECRET, algorithm=ALGORITHM) + return encoded_jwt + + +def decode_token(token: str) -> Optional[dict]: + try: + decoded = jwt.decode(token, SESSION_SECRET, algorithms=[ALGORITHM]) + return decoded + except Exception as e: + return None + + +def extract_token_from_auth_header(auth_header: str): + return auth_header[len("Bearer ") :] + + +def create_api_key(): + key = str(uuid.uuid4()).replace("-", "") + return f"sk-{key}" + + +def get_http_authorization_cred(auth_header: str): + try: + scheme, credentials = auth_header.split(" ") + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + except: + raise ValueError(ERROR_MESSAGES.INVALID_TOKEN) + + +def get_current_user( + request: Request, + auth_token: HTTPAuthorizationCredentials = Depends(bearer_security), +): + token = None + + if auth_token is not None: + token = auth_token.credentials + + if token is None and "token" in request.cookies: + token = request.cookies.get("token") + + if token is None: + raise HTTPException(status_code=403, detail="Not authenticated") + + # auth by api key + if token.startswith("sk-"): + return get_current_user_by_api_key(token) + + # auth by jwt token + data = decode_token(token) + if data != None and "id" in data: + user = Users.get_user_by_id(data["id"]) + if user is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.INVALID_TOKEN, + ) + else: + Users.update_user_last_active_by_id(user.id) + return user + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.UNAUTHORIZED, + ) + + +def get_current_user_by_api_key(api_key: str): + user = Users.get_user_by_api_key(api_key) + + if user is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.INVALID_TOKEN, + ) + else: + Users.update_user_last_active_by_id(user.id) + + return user + + +def get_verified_user(user=Depends(get_current_user)): + if user.role not in {"user", "admin"}: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + return user + + +def get_admin_user(user=Depends(get_current_user)): + if user.role != "admin": + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=ERROR_MESSAGES.ACCESS_PROHIBITED, + ) + return user diff --git a/backend/utils/webhook.py b/backend/utils/webhook.py new file mode 100644 index 0000000000000000000000000000000000000000..b6692e53a7bc3e93c69a1d5c8ac62565d636254f --- /dev/null +++ b/backend/utils/webhook.py @@ -0,0 +1,54 @@ +import json +import requests +import logging + +from config import SRC_LOG_LEVELS, VERSION, WEBUI_FAVICON_URL, WEBUI_NAME + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["WEBHOOK"]) + + +def post_webhook(url: str, message: str, event_data: dict) -> bool: + try: + payload = {} + + # Slack and Google Chat Webhooks + if "https://hooks.slack.com" in url or "https://chat.googleapis.com" in url: + payload["text"] = message + # Discord Webhooks + elif 
"https://discord.com/api/webhooks" in url: + payload["content"] = message + # Microsoft Teams Webhooks + elif "webhook.office.com" in url: + action = event_data.get("action", "undefined") + facts = [ + {"name": name, "value": value} + for name, value in json.loads(event_data.get("user", {})).items() + ] + payload = { + "@type": "MessageCard", + "@context": "http://schema.org/extensions", + "themeColor": "0076D7", + "summary": message, + "sections": [ + { + "activityTitle": message, + "activitySubtitle": f"{WEBUI_NAME} ({VERSION}) - {action}", + "activityImage": WEBUI_FAVICON_URL, + "facts": facts, + "markdown": True, + } + ], + } + # Default Payload + else: + payload = {**event_data} + + log.debug(f"payload: {payload}") + r = requests.post(url, json=payload) + r.raise_for_status() + log.debug(f"r.text: {r.text}") + return True + except Exception as e: + log.exception(e) + return False diff --git a/bun.lockb b/bun.lockb new file mode 100755 index 0000000000000000000000000000000000000000..e0a038da06fc8f3a52efc9e870fa86124ec5ad14 Binary files /dev/null and b/bun.lockb differ diff --git a/confirm_remove.sh b/confirm_remove.sh new file mode 100755 index 0000000000000000000000000000000000000000..051908e6de668bc47b3edd5b79b70d65094823a2 --- /dev/null +++ b/confirm_remove.sh @@ -0,0 +1,13 @@ +#!/bin/bash +echo "Warning: This will remove all containers and volumes, including persistent data. Do you want to continue? [Y/N]" +read ans +if [ "$ans" == "Y" ] || [ "$ans" == "y" ]; then + command docker-compose 2>/dev/null + if [ "$?" == "0" ]; then + docker-compose down -v + else + docker compose down -v + fi +else + echo "Operation cancelled." +fi diff --git a/cypress.config.ts b/cypress.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..dbb538233809b1c26de4c0d33ae45e0c30405c2a --- /dev/null +++ b/cypress.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from 'cypress'; + +export default defineConfig({ + e2e: { + baseUrl: 'http://localhost:8080' + }, + video: true +}); diff --git a/cypress/data/example-doc.txt b/cypress/data/example-doc.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4f6f455ed18711baf9ddb8ae814538ea7076e8d --- /dev/null +++ b/cypress/data/example-doc.txt @@ -0,0 +1,9 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Pellentesque elit eget gravida cum sociis natoque. Morbi tristique senectus et netus et malesuada. Sapien nec sagittis aliquam malesuada bibendum. Amet consectetur adipiscing elit duis tristique sollicitudin. Non pulvinar neque laoreet suspendisse interdum consectetur libero. Arcu cursus vitae congue mauris rhoncus aenean vel elit scelerisque. Nec feugiat nisl pretium fusce id velit. Imperdiet proin fermentum leo vel. Arcu dui vivamus arcu felis bibendum ut tristique et egestas. Pellentesque sit amet porttitor eget dolor morbi non arcu risus. Egestas tellus rutrum tellus pellentesque eu tincidunt tortor aliquam. Et ultrices neque ornare aenean euismod. + +Enim nulla aliquet porttitor lacus luctus accumsan tortor posuere ac. Viverra nibh cras pulvinar mattis nunc. Lacinia at quis risus sed vulputate. Ac tortor vitae purus faucibus ornare suspendisse sed nisi lacus. Bibendum arcu vitae elementum curabitur vitae nunc. Consectetur adipiscing elit duis tristique sollicitudin nibh sit amet commodo. Velit egestas dui id ornare arcu odio ut. Et malesuada fames ac turpis egestas integer eget aliquet. 
Lacus suspendisse faucibus interdum posuere lorem ipsum dolor sit. Morbi tristique senectus et netus. Pretium viverra suspendisse potenti nullam ac tortor vitae. Parturient montes nascetur ridiculus mus mauris vitae. Quis viverra nibh cras pulvinar mattis nunc sed blandit libero. Euismod nisi porta lorem mollis aliquam ut porttitor leo. Mauris in aliquam sem fringilla ut morbi. Faucibus pulvinar elementum integer enim neque. Neque sodales ut etiam sit. Consectetur a erat nam at. + +Sed nisi lacus sed viverra tellus in hac habitasse. Proin sagittis nisl rhoncus mattis rhoncus. Risus commodo viverra maecenas accumsan lacus. Morbi quis commodo odio aenean sed adipiscing. Mollis nunc sed id semper risus in. Ultricies mi eget mauris pharetra et ultrices neque. Amet luctus venenatis lectus magna fringilla urna porttitor rhoncus. Eget magna fermentum iaculis eu non diam phasellus. Id diam maecenas ultricies mi eget mauris pharetra et ultrices. Id donec ultrices tincidunt arcu non sodales. Sed cras ornare arcu dui vivamus arcu felis bibendum ut. Urna duis convallis convallis tellus id interdum velit. Rhoncus mattis rhoncus urna neque viverra justo nec. Purus semper eget duis at tellus at urna condimentum. Et odio pellentesque diam volutpat commodo sed egestas. Blandit volutpat maecenas volutpat blandit. In egestas erat imperdiet sed euismod nisi porta lorem mollis. Est ullamcorper eget nulla facilisi etiam dignissim. + +Justo nec ultrices dui sapien eget mi proin sed. Purus gravida quis blandit turpis cursus in hac. Placerat orci nulla pellentesque dignissim enim sit. Morbi tristique senectus et netus et malesuada fames ac. Consequat mauris nunc congue nisi. Eu lobortis elementum nibh tellus molestie nunc non blandit. Viverra justo nec ultrices dui. Morbi non arcu risus quis. Elementum sagittis vitae et leo duis. Lectus mauris ultrices eros in cursus. Neque laoreet suspendisse interdum consectetur. + +Facilisis gravida neque convallis a cras. Nisl rhoncus mattis rhoncus urna neque viverra justo. Faucibus purus in massa tempor. Lacus laoreet non curabitur gravida arcu ac tortor. Tincidunt eget nullam non nisi est sit amet. Ornare lectus sit amet est placerat in egestas. Sollicitudin tempor id eu nisl nunc mi. Scelerisque viverra mauris in aliquam sem fringilla ut. Ullamcorper sit amet risus nullam. Mauris rhoncus aenean vel elit scelerisque mauris pellentesque pulvinar. Velit euismod in pellentesque massa placerat duis ultricies lacus. Pharetra magna ac placerat vestibulum lectus mauris ultrices eros in. Lorem ipsum dolor sit amet. Sit amet mauris commodo quis imperdiet. Quam pellentesque nec nam aliquam sem et tortor. Amet nisl purus in mollis nunc. Sed risus pretium quam vulputate dignissim suspendisse in est. Nisl condimentum id venenatis a condimentum. Velit euismod in pellentesque massa. Quam id leo in vitae turpis massa sed. diff --git a/cypress/e2e/chat.cy.ts b/cypress/e2e/chat.cy.ts new file mode 100644 index 0000000000000000000000000000000000000000..ddb33d6c06b54bcbe562241c73569c710b900948 --- /dev/null +++ b/cypress/e2e/chat.cy.ts @@ -0,0 +1,101 @@ +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// + +// These tests run through the chat flow. 
+describe('Settings', () => { + // Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames + after(() => { + // eslint-disable-next-line cypress/no-unnecessary-waiting + cy.wait(2000); + }); + + beforeEach(() => { + // Login as the admin user + cy.loginAdmin(); + // Visit the home page + cy.visit('/'); + }); + + context('Ollama', () => { + it('user can select a model', () => { + // Click on the model selector + cy.get('button[aria-label="Select a model"]').click(); + // Select the first model + cy.get('button[aria-label="model-item"]').first().click(); + }); + + it('user can perform text chat', () => { + // Click on the model selector + cy.get('button[aria-label="Select a model"]').click(); + // Select the first model + cy.get('button[aria-label="model-item"]').first().click(); + // Type a message + cy.get('#chat-textarea').type('Hi, what can you do? A single sentence only please.', { + force: true + }); + // Send the message + cy.get('button[type="submit"]').click(); + // User's message should be visible + cy.get('.chat-user').should('exist'); + // Wait for the response + cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received + .find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received + .should('exist'); + }); + + it('user can share chat', () => { + // Click on the model selector + cy.get('button[aria-label="Select a model"]').click(); + // Select the first model + cy.get('button[aria-label="model-item"]').first().click(); + // Type a message + cy.get('#chat-textarea').type('Hi, what can you do? A single sentence only please.', { + force: true + }); + // Send the message + cy.get('button[type="submit"]').click(); + // User's message should be visible + cy.get('.chat-user').should('exist'); + // Wait for the response + cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received + .find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received + .should('exist'); + // spy on requests + const spy = cy.spy(); + cy.intercept('GET', '/api/v1/chats/*', spy); + // Open context menu + cy.get('#chat-context-menu-button').click(); + // Click share button + cy.get('#chat-share-button').click(); + // Check if the share dialog is visible + cy.get('#copy-and-share-chat-button').should('exist'); + cy.wrap({}, { timeout: 5000 }).should(() => { + // Check if the request was made twice (once for to replace chat object and once more due to change event) + expect(spy).to.be.callCount(2); + }); + }); + + it('user can generate image', () => { + // Click on the model selector + cy.get('button[aria-label="Select a model"]').click(); + // Select the first model + cy.get('button[aria-label="model-item"]').first().click(); + // Type a message + cy.get('#chat-textarea').type('Hi, what can you do? 
A single sentence only please.', { + force: true + }); + // Send the message + cy.get('button[type="submit"]').click(); + // User's message should be visible + cy.get('.chat-user').should('exist'); + // Wait for the response + cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received + .find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received + .should('exist'); + // Click on the generate image button + cy.get('[aria-label="Generate Image"]').click(); + // Wait for image to be visible + cy.get('img[data-cy="image"]', { timeout: 60_000 }).should('be.visible'); + }); + }); +}); diff --git a/cypress/e2e/documents.cy.ts b/cypress/e2e/documents.cy.ts new file mode 100644 index 0000000000000000000000000000000000000000..6ca14980d2520f0c089a8f0306d04f5d7e5890e6 --- /dev/null +++ b/cypress/e2e/documents.cy.ts @@ -0,0 +1,46 @@ +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// + +describe('Documents', () => { + const timestamp = Date.now(); + + before(() => { + cy.uploadTestDocument(timestamp); + }); + + after(() => { + cy.deleteTestDocument(timestamp); + }); + + context('Admin', () => { + beforeEach(() => { + // Login as the admin user + cy.loginAdmin(); + // Visit the home page + cy.visit('/workspace/documents'); + cy.get('button').contains('#cypress-test').click(); + }); + + it('can see documents', () => { + cy.get('div').contains(`document-test-initial-${timestamp}.txt`).should('have.length', 1); + }); + + it('can see edit button', () => { + cy.get('div') + .contains(`document-test-initial-${timestamp}.txt`) + .get("button[aria-label='Edit Doc']") + .should('exist'); + }); + + it('can see delete button', () => { + cy.get('div') + .contains(`document-test-initial-${timestamp}.txt`) + .get("button[aria-label='Delete Doc']") + .should('exist'); + }); + + it('can see upload button', () => { + cy.get("button[aria-label='Add Docs']").should('exist'); + }); + }); +}); diff --git a/cypress/e2e/registration.cy.ts b/cypress/e2e/registration.cy.ts new file mode 100644 index 0000000000000000000000000000000000000000..232d75e882a93870f318f96f98ffd56efbc4bd8f --- /dev/null +++ b/cypress/e2e/registration.cy.ts @@ -0,0 +1,52 @@ +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// +import { adminUser } from '../support/e2e'; + +// These tests assume the following defaults: +// 1. No users exist in the database or that the test admin user is an admin +// 2. Language is set to English +// 3. 
The default role for new users is 'pending' +describe('Registration and Login', () => { + // Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames + after(() => { + // eslint-disable-next-line cypress/no-unnecessary-waiting + cy.wait(2000); + }); + + beforeEach(() => { + cy.visit('/'); + }); + + it('should register a new user as pending', () => { + const userName = `Test User - ${Date.now()}`; + const userEmail = `cypress-${Date.now()}@example.com`; + // Toggle from sign in to sign up + cy.contains('Sign up').click(); + // Fill out the form + cy.get('input[autocomplete="name"]').type(userName); + cy.get('input[autocomplete="email"]').type(userEmail); + cy.get('input[type="password"]').type('password'); + // Submit the form + cy.get('button[type="submit"]').click(); + // Wait until the user is redirected to the home page + cy.contains(userName); + // Expect the user to be pending + cy.contains('Check Again'); + }); + + it('can login with the admin user', () => { + // Fill out the form + cy.get('input[autocomplete="email"]').type(adminUser.email); + cy.get('input[type="password"]').type(adminUser.password); + // Submit the form + cy.get('button[type="submit"]').click(); + // Wait until the user is redirected to the home page + cy.contains(adminUser.name); + // Dismiss the changelog dialog if it is visible + cy.getAllLocalStorage().then((ls) => { + if (!ls['version']) { + cy.get('button').contains("Okay, Let's Go!").click(); + } + }); + }); +}); diff --git a/cypress/e2e/settings.cy.ts b/cypress/e2e/settings.cy.ts new file mode 100644 index 0000000000000000000000000000000000000000..4ea91698072fd25cb75a6961ac3d9f0652c303da --- /dev/null +++ b/cypress/e2e/settings.cy.ts @@ -0,0 +1,63 @@ +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// +import { adminUser } from '../support/e2e'; + +// These tests run through the various settings pages, ensuring that the user can interact with them as expected +describe('Settings', () => { + // Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames + after(() => { + // eslint-disable-next-line cypress/no-unnecessary-waiting + cy.wait(2000); + }); + + beforeEach(() => { + // Login as the admin user + cy.loginAdmin(); + // Visit the home page + cy.visit('/'); + // Click on the user menu + cy.get('button[aria-label="User Menu"]').click(); + // Click on the settings link + cy.get('button').contains('Settings').click(); + }); + + context('General', () => { + it('user can open the General modal and hit save', () => { + cy.get('button').contains('General').click(); + cy.get('button').contains('Save').click(); + }); + }); + + context('Interface', () => { + it('user can open the Interface modal and hit save', () => { + cy.get('button').contains('Interface').click(); + cy.get('button').contains('Save').click(); + }); + }); + + context('Audio', () => { + it('user can open the Audio modal and hit save', () => { + cy.get('button').contains('Audio').click(); + cy.get('button').contains('Save').click(); + }); + }); + + context('Chats', () => { + it('user can open the Chats modal', () => { + cy.get('button').contains('Chats').click(); + }); + }); + + context('Account', () => { + it('user can open the Account modal and hit save', () => { + cy.get('button').contains('Account').click(); + cy.get('button').contains('Save').click(); + }); + }); + + context('About', () => { + it('user can open the About modal', () => { + 
cy.get('button').contains('About').click(); + }); + }); +}); diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts new file mode 100644 index 0000000000000000000000000000000000000000..9847887333348e70ca7ccaeb560f34fbb7486904 --- /dev/null +++ b/cypress/support/e2e.ts @@ -0,0 +1,122 @@ +/// +// eslint-disable-next-line @typescript-eslint/triple-slash-reference +/// + +export const adminUser = { + name: 'Admin User', + email: 'admin@example.com', + password: 'password' +}; + +const login = (email: string, password: string) => { + return cy.session( + email, + () => { + // Make sure to test against us english to have stable tests, + // regardless on local language preferences + localStorage.setItem('locale', 'en-US'); + // Visit auth page + cy.visit('/auth'); + // Fill out the form + cy.get('input[autocomplete="email"]').type(email); + cy.get('input[type="password"]').type(password); + // Submit the form + cy.get('button[type="submit"]').click(); + // Wait until the user is redirected to the home page + cy.get('#chat-search').should('exist'); + // Get the current version to skip the changelog dialog + if (localStorage.getItem('version') === null) { + cy.get('button').contains("Okay, Let's Go!").click(); + } + }, + { + validate: () => { + cy.request({ + method: 'GET', + url: '/api/v1/auths/', + headers: { + Authorization: 'Bearer ' + localStorage.getItem('token') + } + }); + } + } + ); +}; + +const register = (name: string, email: string, password: string) => { + return cy + .request({ + method: 'POST', + url: '/api/v1/auths/signup', + body: { + name: name, + email: email, + password: password + }, + failOnStatusCode: false + }) + .then((response) => { + expect(response.status).to.be.oneOf([200, 400]); + }); +}; + +const registerAdmin = () => { + return register(adminUser.name, adminUser.email, adminUser.password); +}; + +const loginAdmin = () => { + return login(adminUser.email, adminUser.password); +}; + +Cypress.Commands.add('login', (email, password) => login(email, password)); +Cypress.Commands.add('register', (name, email, password) => register(name, email, password)); +Cypress.Commands.add('registerAdmin', () => registerAdmin()); +Cypress.Commands.add('loginAdmin', () => loginAdmin()); + +Cypress.Commands.add('uploadTestDocument', (suffix: any) => { + // Login as admin + cy.loginAdmin(); + // upload example document + cy.visit('/workspace/documents'); + // Create a document + cy.get("button[aria-label='Add Docs']").click(); + cy.readFile('cypress/data/example-doc.txt').then((text) => { + // select file + cy.get('#upload-doc-input').selectFile( + { + contents: Cypress.Buffer.from(text + Date.now()), + fileName: `document-test-initial-${suffix}.txt`, + mimeType: 'text/plain', + lastModified: Date.now() + }, + { + force: true + } + ); + // open tag input + cy.get("button[aria-label='Add Tag']").click(); + cy.get("input[placeholder='Add a tag']").type('cypress-test'); + cy.get("button[aria-label='Save Tag']").click(); + + // submit to upload + cy.get("button[type='submit']").click(); + + // wait for upload to finish + cy.get('button').contains('#cypress-test').should('exist'); + cy.get('div').contains(`document-test-initial-${suffix}.txt`).should('exist'); + }); +}); + +Cypress.Commands.add('deleteTestDocument', (suffix: any) => { + cy.loginAdmin(); + cy.visit('/workspace/documents'); + // clean up uploaded documents + cy.get('div') + .contains(`document-test-initial-${suffix}.txt`) + .find("button[aria-label='Delete Doc']") + .click(); +}); + +before(() => { + cy.registerAdmin(); 
+}); diff --git a/cypress/support/index.d.ts b/cypress/support/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..647db92115a3c6ee9e5b35b90b585a2a0a837306 --- /dev/null +++ b/cypress/support/index.d.ts @@ -0,0 +1,13 @@ +// load the global Cypress types +/// + +declare namespace Cypress { + interface Chainable { + login(email: string, password: string): Chainable; + register(name: string, email: string, password: string): Chainable; + registerAdmin(): Chainable; + loginAdmin(): Chainable; + uploadTestDocument(suffix: any): Chainable; + deleteTestDocument(suffix: any): Chainable; + } +} diff --git a/cypress/tsconfig.json b/cypress/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..ff28d946496fa69c603c0ec9287317a7dcc83279 --- /dev/null +++ b/cypress/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "inlineSourceMap": true, + "sourceMap": false + } +} diff --git a/docker-compose.a1111-test.yaml b/docker-compose.a1111-test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e6ab12c07f0995369107943d0b23a68ebac607b3 --- /dev/null +++ b/docker-compose.a1111-test.yaml @@ -0,0 +1,31 @@ +# This is an overlay that spins up stable-diffusion-webui for integration testing +# This is not designed to be used in production +services: + stable-diffusion-webui: + # Not built for ARM64 + platform: linux/amd64 + image: ghcr.io/neggles/sd-webui-docker:latest + restart: unless-stopped + environment: + CLI_ARGS: "--api --use-cpu all --precision full --no-half --skip-torch-cuda-test --ckpt /empty.pt --do-not-download-clip --disable-nan-check --disable-opt-split-attention" + PYTHONUNBUFFERED: "1" + TERM: "vt100" + SD_WEBUI_VARIANT: "default" + # Hack to get container working on Apple Silicon + # Rosetta creates a conflict ${HOME}/.cache folder + entrypoint: /bin/bash + command: + - -c + - | + export HOME=/root-home + rm -rf $${HOME}/.cache + /docker/entrypoint.sh python -u webui.py --listen --port $${WEBUI_PORT} --skip-version-check $${CLI_ARGS} + volumes: + - ./test/test_files/image_gen/sd-empty.pt:/empty.pt + + open-webui: + environment: + ENABLE_IMAGE_GENERATION: "true" + AUTOMATIC1111_BASE_URL: http://stable-diffusion-webui:7860 + IMAGE_SIZE: "64x64" + IMAGE_STEPS: "3" diff --git a/docker-compose.amdgpu.yaml b/docker-compose.amdgpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7a1295d94515c6f32a9c11d913ce603a989f504a --- /dev/null +++ b/docker-compose.amdgpu.yaml @@ -0,0 +1,8 @@ +services: + ollama: + devices: + - /dev/kfd:/dev/kfd + - /dev/dri:/dev/dri + image: ollama/ollama:${OLLAMA_DOCKER_TAG-rocm} + environment: + - 'HSA_OVERRIDE_GFX_VERSION=${HSA_OVERRIDE_GFX_VERSION-11.0.0}' \ No newline at end of file diff --git a/docker-compose.api.yaml b/docker-compose.api.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8f8fbe59ad03274ef4f6a414cc9333290588e0aa --- /dev/null +++ b/docker-compose.api.yaml @@ -0,0 +1,5 @@ +services: + ollama: + # Expose Ollama API outside the container stack + ports: + - ${OLLAMA_WEBAPI_PORT-11434}:11434 diff --git a/docker-compose.data.yaml b/docker-compose.data.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4b70601f894a1ad3530cc25ea616a3a5ddb691a6 --- /dev/null +++ b/docker-compose.data.yaml @@ -0,0 +1,4 @@ +services: + ollama: + volumes: + - ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama diff --git a/docker-compose.gpu.yaml b/docker-compose.gpu.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..de821235da42f5e116315f7407d2c6de0451c99c --- /dev/null +++ b/docker-compose.gpu.yaml @@ -0,0 +1,11 @@ +services: + ollama: + # GPU support + deploy: + resources: + reservations: + devices: + - driver: ${OLLAMA_GPU_DRIVER-nvidia} + count: ${OLLAMA_GPU_COUNT-1} + capabilities: + - gpu diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74249febd9e37e2166fa6c07a73770812e745b7d --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,34 @@ +services: + ollama: + volumes: + - ollama:/root/.ollama + container_name: ollama + pull_policy: always + tty: true + restart: unless-stopped + image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest} + + open-webui: + build: + context: . + args: + OLLAMA_BASE_URL: '/ollama' + dockerfile: Dockerfile + image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main} + container_name: open-webui + volumes: + - open-webui:/app/backend/data + depends_on: + - ollama + ports: + - ${OPEN_WEBUI_PORT-3000}:8080 + environment: + - 'OLLAMA_BASE_URL=http://ollama:11434' + - 'WEBUI_SECRET_KEY=' + extra_hosts: + - host.docker.internal:host-gateway + restart: unless-stopped + +volumes: + ollama: {} + open-webui: {} diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..325964b1a94a94b3a296a4da8e84525bc24d3fb8 --- /dev/null +++ b/docs/CONTRIBUTING.md @@ -0,0 +1,74 @@ +# Contributing to Open WebUI + +🚀 **Welcome, Contributors!** 🚀 + +Your interest in contributing to Open WebUI is greatly appreciated. This document is here to guide you through the process, ensuring your contributions enhance the project effectively. Let's make Open WebUI even better, together! + +## 📌 Key Points + +### 🦙 Ollama vs. Open WebUI + +It's crucial to distinguish between Ollama and Open WebUI: + +- **Open WebUI** focuses on providing an intuitive and responsive web interface for chat interactions. +- **Ollama** is the underlying technology that powers these interactions. + +If your issue or contribution pertains directly to the core Ollama technology, please direct it to the appropriate [Ollama project repository](https://ollama.com/). Open WebUI's repository is dedicated to the web interface aspect only. + +### 🚨 Reporting Issues + +Noticed something off? Have an idea? Check our [Issues tab](https://github.com/open-webui/open-webui/issues) to see if it's already been reported or suggested. If not, feel free to open a new issue. When reporting an issue, please follow our issue templates. These templates are designed to ensure that all necessary details are provided from the start, enabling us to address your concerns more efficiently. + +> [!IMPORTANT] +> +> - **Template Compliance:** Please be aware that failure to follow the provided issue template, or not providing the requested information at all, will likely result in your issue being closed without further consideration. This approach is critical for maintaining the manageability and integrity of issue tracking. +> +> - **Detail is Key:** To ensure your issue is understood and can be effectively addressed, it's imperative to include comprehensive details. Descriptions should be clear, including steps to reproduce, expected outcomes, and actual results. Lack of sufficient detail may hinder our ability to resolve your issue. 
+ +### 🧭 Scope of Support + +We've noticed an uptick in issues not directly related to Open WebUI but rather to the environment it's run in, especially Docker setups. While we strive to support Docker deployment, understanding Docker fundamentals is crucial for a smooth experience. + +- **Docker Deployment Support**: Open WebUI supports Docker deployment. Familiarity with Docker is assumed. For Docker basics, please refer to the [official Docker documentation](https://docs.docker.com/get-started/overview/). + +- **Advanced Configurations**: Setting up reverse proxies for HTTPS and managing Docker deployments requires foundational knowledge. There are numerous online resources available for learning these skills, and having them will greatly enhance your experience with Open WebUI and similar projects. + +## 💡 Contributing + +Looking to contribute? Great! Here's how you can help: + +### 🛠 Pull Requests + +We welcome pull requests. Before submitting one, please: + +1. Open a discussion regarding your ideas [here](https://github.com/open-webui/open-webui/discussions/new/choose). +2. Follow the project's coding standards and include tests for new features. +3. Update documentation as necessary. +4. Write clear, descriptive commit messages. +5. Complete your pull request in a timely manner. We move fast, and having PRs hang around too long is not feasible. If you can't get it done within a reasonable time frame, we may have to close it to keep the project moving forward. + +### 📚 Documentation & Tutorials + +Help us make Open WebUI more accessible by improving documentation, writing tutorials, or creating guides on setting up and optimizing the web UI. + +### 🌐 Translations and Internationalization + +Help us make Open WebUI available to a wider audience. In this section, we'll guide you through the process of adding new translations to the project. + +We use JSON files to store translations. You can find the existing translation files in the `src/lib/i18n/locales` directory. Each directory corresponds to a specific language, for example, `en-US` for English (US), `fr-FR` for French (France), and so on. You can refer to [ISO 639 Language Codes](http://www.lingoes.net/en/translator/langcode.htm) to find the appropriate code for a specific language. + +To add a new language (a command-line sketch follows at the end of this guide): + +- Create a new directory in the `src/lib/i18n/locales` path with the appropriate language code as its name. For instance, if you're adding translations for Spanish (Spain), create a new directory named `es-ES`. +- Copy the American English translation file(s) (from the `en-US` directory in `src/lib/i18n/locales`) to this new directory and update the string values in JSON format according to your language. Make sure to preserve the structure of the JSON object. +- Add the language code and its respective title to the languages file at `src/lib/i18n/locales/languages.json`. + +### 🤔 Questions & Feedback + +Got questions or feedback? Join our [Discord community](https://discord.gg/5rJgQTnV4s) or open an issue. We're here to help! + +## 🙏 Thank You! + +Your contributions, big or small, make a significant impact on Open WebUI. We're excited to see what you bring to the project! + +Together, let's create an even more powerful tool for the community.
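To make the translation steps above concrete, here is a minimal command-line sketch for a hypothetical Spanish (Spain) locale; `es-ES` is only an example, and the exact shape of the `languages.json` entry should be checked against the existing entries in that file:

```
# Scaffold a new es-ES locale from the American English one (run from the repo root)
cp -r src/lib/i18n/locales/en-US src/lib/i18n/locales/es-ES

# Then translate the copied JSON string values, and register the new locale
# by adding an "es-ES" entry (code plus title) to src/lib/i18n/locales/languages.json
```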
🌟 diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4113c35a78f9d9c7d3096cf851f76f495421fc51 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,3 @@ +# Project workflow + +[![](https://mermaid.ink/img/pako:eNq1k01rAjEQhv_KkFNLFe1N9iAUevFSRVl6Cci4Gd1ANtlmsmtF_O_N7iqtHxR76ClhMu87zwyZvcicIpEIpo-KbEavGjceC2lL9EFnukQbIGXygNye5y9TY7DAZTpZLsjXXVYXg3dapRM4hh9mu5A7-3hTfSXtAtJK21Tsj8dPl3USmJZkGVbebWNKD2rNOjAYl6HJHYdkNBwNpb3U9aNZvzFNYE6h8tFiSyZzBUGJG4K1dwVwTSYQrCptlLRvLt5dA5i2la5Ruk51Ux0VKQjuxPVbAwuyiuFlNgHfzJ5DoxtgqQf1813gnZRLZ5lAYcD7WT1lpGtiQKug9C4jZrrp-Fd-1-Y1bdzo4dvnZDLz7lPHyj8sOgfg4x84E7RTuEaZt8yRZqtDfgT_rwG2u3Dv_ERPFOQL1Cqu2F5aAClCTgVJkcSrojVWJkgh7SGmYhXcYmczkQRfUU9UZfQ4baRI1miYDl_QqlPg?type=png)](https://mermaid.live/edit#pako:eNq1k01rAjEQhv_KkFNLFe1N9iAUevFSRVl6Cci4Gd1ANtlmsmtF_O_N7iqtHxR76ClhMu87zwyZvcicIpEIpo-KbEavGjceC2lL9EFnukQbIGXygNye5y9TY7DAZTpZLsjXXVYXg3dapRM4hh9mu5A7-3hTfSXtAtJK21Tsj8dPl3USmJZkGVbebWNKD2rNOjAYl6HJHYdkNBwNpb3U9aNZvzFNYE6h8tFiSyZzBUGJG4K1dwVwTSYQrCptlLRvLt5dA5i2la5Ruk51Ux0VKQjuxPVbAwuyiuFlNgHfzJ5DoxtgqQf1813gnZRLZ5lAYcD7WT1lpGtiQKug9C4jZrrp-Fd-1-Y1bdzo4dvnZDLz7lPHyj8sOgfg4x84E7RTuEaZt8yRZqtDfgT_rwG2u3Dv_ERPFOQL1Cqu2F5aAClCTgVJkcSrojVWJkgh7SGmYhXcYmczkQRfUU9UZfQ4baRI1miYDl_QqlPg) diff --git a/docs/SECURITY.md b/docs/SECURITY.md new file mode 100644 index 0000000000000000000000000000000000000000..4a0c37e7c81cafd4c7793a666df34decd387c79a --- /dev/null +++ b/docs/SECURITY.md @@ -0,0 +1,20 @@ +# Security Policy + +Our primary goal is to ensure the protection and confidentiality of sensitive data stored by users on open-webui. + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| main | :white_check_mark: | +| others | :x: | + +## Reporting a Vulnerability + +If you discover a security issue within our system, please notify us immediately via a pull request or contact us on discord. + +## Product Security + +We regularly audit our internal processes and system's architecture for vulnerabilities using a combination of automated and manual testing techniques. + +We are planning on implementing SAST and SCA scans in our project soon. diff --git a/docs/apache.md b/docs/apache.md new file mode 100644 index 0000000000000000000000000000000000000000..be07f2345da88cabe39483587984b40f07658cb3 --- /dev/null +++ b/docs/apache.md @@ -0,0 +1,199 @@ +# Hosting UI and Models separately + +Sometimes, its beneficial to host Ollama, separate from the UI, but retain the RAG and RBAC support features shared across users: + +# Open WebUI Configuration + +## UI Configuration + +For the UI configuration, you can set up the Apache VirtualHost as follows: + +``` +# Assuming you have a website hosting this UI at "server.com" + + ServerName server.com + DocumentRoot /home/server/public_html + + ProxyPass / http://server.com:3000/ nocanon + ProxyPassReverse / http://server.com:3000/ + + +``` + +Enable the site first before you can request SSL: + +`a2ensite server.com.conf` # this will enable the site. 
a2ensite is short for "Apache 2 Enable Site". + +``` +# For SSL + + ServerName server.com + DocumentRoot /home/server/public_html + + ProxyPass / http://server.com:3000/ nocanon + ProxyPassReverse / http://server.com:3000/ + + SSLEngine on + SSLCertificateFile /etc/ssl/virtualmin/170514456861234/ssl.cert + SSLCertificateKeyFile /etc/ssl/virtualmin/170514456861234/ssl.key + SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1 + + SSLProxyEngine on + SSLCACertificateFile /etc/ssl/virtualmin/170514456865864/ssl.ca + + +``` + +I'm using Virtualmin here for my SSL clusters, but you can also use certbot directly or your preferred SSL method. To use SSL: + +### Prerequisites + +Run the following commands: + +`snap install certbot --classic` +`apt install python3-certbot-apache` (this installs the Apache plugin). + +Navigate to the Apache sites-available directory: + +`cd /etc/apache2/sites-available/` + +Create server.com.conf if it does not already exist, containing the VirtualHost configuration above (adjust it to match your setup). Use the variant without SSL. + +Once it's created, run `certbot --apache -d server.com`; this will request and install the SSL certificate for you and create server.com.le-ssl.conf. + +# Configuring Ollama Server + +On your latest installation of Ollama, make sure that you have set up your API server from the official Ollama reference: + +[Ollama FAQ](https://github.com/jmorganca/ollama/blob/main/docs/faq.md) + +### TL;DR + +The guide doesn't seem to match the current updated service file on Linux, so we will address it here: + +Unless you're compiling Ollama from source, installing with the standard install `curl https://ollama.com/install.sh | sh` creates a file called `ollama.service` in /etc/systemd/system. You can use nano to edit the file: + +``` +sudo nano /etc/systemd/system/ollama.service +``` + +Add the following lines: + +``` +Environment="OLLAMA_HOST=0.0.0.0:11434" # this line is mandatory. You can also specify +``` + +For instance: + +``` +[Unit] +Description=Ollama Service +After=network-online.target + +[Service] +ExecStart=/usr/local/bin/ollama serve +Environment="OLLAMA_HOST=0.0.0.0:11434" # this line is mandatory. You can also specify 192.168.254.109:DIFFERENT_PORT, format +Environment="OLLAMA_ORIGINS=http://192.168.254.106:11434,https://models.server.city" # this line is optional +User=ollama +Group=ollama +Restart=always +RestartSec=3 +Environment="PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/s> + +[Install] +WantedBy=default.target +``` + +Save the file by pressing CTRL+S, then exit with CTRL+X. + +Once the service has been restarted (or the machine has rebooted), the Ollama server will be listening on the IP:PORT you specified, in this case 0.0.0.0:11434, or 192.168.254.106:11434 (whatever your local IP address is). Make sure your router is correctly configured to forward port 11434 to your local server if it needs to be reachable from outside your network. 
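The unit-file edit above only takes effect once systemd reloads it; here is a minimal apply-and-verify sketch, assuming the standard `ollama.service` name created by the install script and that `curl` is available:

```
# Reload systemd so it picks up the edited unit, then restart Ollama
sudo systemctl daemon-reload
sudo systemctl restart ollama

# Verify the service is up and listening on the configured address
systemctl status ollama --no-pager
curl http://192.168.254.106:11434   # should answer with "Ollama is running"
```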
+ +# Ollama Model Configuration + +## For the Ollama model configuration, use the following Apache VirtualHost setup: + +Navigate to the Apache sites-available directory: + +`cd /etc/apache2/sites-available/` + +`nano models.server.city.conf` # match this with your Ollama server domain + +Add the following VirtualHost, using this example as a template (modify as needed): + +``` + +# Assuming you have a website hosting this UI at "models.server.city" + + + DocumentRoot "/var/www/html/" + ServerName models.server.city + + Options None + Require all granted + + + ProxyRequests Off + ProxyPreserveHost On + ProxyAddHeaders On + SSLProxyEngine on + + ProxyPass / http://server.city:1000/ nocanon # or port 11434 + ProxyPassReverse / http://server.city:1000/ # or port 11434 + + SSLCertificateFile /etc/letsencrypt/live/models.server.city/fullchain.pem + SSLCertificateKeyFile /etc/letsencrypt/live/models.server.city/privkey.pem + Include /etc/letsencrypt/options-ssl-apache.conf + + +``` + +You may need to enable the site first (if you haven't done so yet) before you can request SSL: + +`a2ensite models.server.city.conf` + +#### For the SSL part of the Ollama server + +Run the following commands: + +Navigate to the Apache sites-available directory: + +`cd /etc/apache2/sites-available/` +`certbot --apache -d models.server.city` + +``` + + DocumentRoot "/var/www/html/" + ServerName models.server.city + + Options None + Require all granted + + + ProxyRequests Off + ProxyPreserveHost On + ProxyAddHeaders On + SSLProxyEngine on + + ProxyPass / http://server.city:1000/ nocanon # or port 11434 + ProxyPassReverse / http://server.city:1000/ # or port 11434 + + RewriteEngine on + RewriteCond %{SERVER_NAME} =models.server.city + RewriteRule ^ https://%{SERVER_NAME}%{REQUEST_URI} [END,NE,R=permanent] + + +``` + +Don't forget to restart/reload Apache with `systemctl reload apache2`. + +Open your site at https://server.com! + +**Congratulations**, your _**Open-AI-like Chat-GPT style UI**_ is now serving AI with RAG, RBAC and multimodal features! Download Ollama models if you haven't done so yet! + +If you encounter any misconfiguration or errors, please file an issue or start a discussion. There are a lot of friendly developers here to assist you. + +Let's make this UI much more user-friendly for everyone! + +Thanks for making Open WebUI your UI choice for AI! + +This doc was written by **Bob Reyes**, your **Open-WebUI** fan from the Philippines. 
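One practical note on the Apache examples above: the ProxyPass, SSL, and Rewrite directives only work if the corresponding Apache modules are enabled. A minimal sketch, with the module names inferred from the directives used rather than taken from this guide:

```
# Enable the modules the VirtualHost examples rely on, then reload Apache
sudo a2enmod proxy proxy_http ssl rewrite
sudo systemctl reload apache2
```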
diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 0000000000000000000000000000000000000000..8ddaf0749bd6783b785fe63d6ce4b7f3e8a5b5d9 --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,23 @@ +# noqa: INP001 +import os +import shutil +import subprocess +from sys import stderr + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomBuildHook(BuildHookInterface): + def initialize(self, version, build_data): + super().initialize(version, build_data) + stderr.write(">>> Building Open Webui frontend\n") + npm = shutil.which("npm") + if npm is None: + raise RuntimeError( + "NodeJS `npm` is required for building Open Webui but it was not found" + ) + stderr.write("### npm install\n") + subprocess.run([npm, "install"], check=True) # noqa: S603 + stderr.write("\n### npm run build\n") + os.environ["APP_BUILD_HASH"] = version + subprocess.run([npm, "run", "build"], check=True) # noqa: S603 diff --git a/i18next-parser.config.ts b/i18next-parser.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..37ce57ee1474e49787145f82cd6a64ad65773c4d --- /dev/null +++ b/i18next-parser.config.ts @@ -0,0 +1,38 @@ +// i18next-parser.config.ts +import { getLanguages } from './src/lib/i18n/index.ts'; + +const getLangCodes = async () => { + const languages = await getLanguages(); + return languages.map((l) => l.code); +}; + +export default { + contextSeparator: '_', + createOldCatalogs: false, + defaultNamespace: 'translation', + defaultValue: '', + indentation: 2, + keepRemoved: false, + keySeparator: false, + lexers: { + svelte: ['JavascriptLexer'], + js: ['JavascriptLexer'], + ts: ['JavascriptLexer'], + + default: ['JavascriptLexer'] + }, + lineEnding: 'auto', + locales: await getLangCodes(), + namespaceSeparator: false, + output: 'src/lib/i18n/locales/$LOCALE/$NAMESPACE.json', + pluralSeparator: '_', + input: 'src/**/*.{js,svelte}', + sort: true, + verbose: true, + failOnWarnings: false, + failOnUpdate: false, + customValueTemplate: null, + resetDefaultValueLocale: null, + i18nextOptions: null, + yamlOptions: null +}; diff --git a/kubernetes/helm/README.md b/kubernetes/helm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5737007d964de10301f85357e83a91a3bf635248 --- /dev/null +++ b/kubernetes/helm/README.md @@ -0,0 +1,4 @@ +# Helm Charts +Open WebUI Helm Charts are now hosted in a separate repo, which can be found here: https://github.com/open-webui/helm-charts + +The charts are released at https://helm.openwebui.com. 
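Since the charts now live in that external repository, here is a minimal installation sketch; the repository alias and the `open-webui/open-webui` chart name are assumptions, not something stated in this README:

```
# Add the hosted chart repository and install the chart into its own namespace
helm repo add open-webui https://helm.openwebui.com
helm repo update
helm install open-webui open-webui/open-webui --namespace open-webui --create-namespace
```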
\ No newline at end of file diff --git a/kubernetes/manifest/base/ollama-service.yaml b/kubernetes/manifest/base/ollama-service.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8bab65b59effa86ab8e80b6b3f8629ccdbf7ca39 --- /dev/null +++ b/kubernetes/manifest/base/ollama-service.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Service +metadata: + name: ollama-service + namespace: open-webui +spec: + selector: + app: ollama + ports: + - protocol: TCP + port: 11434 + targetPort: 11434 \ No newline at end of file diff --git a/kubernetes/manifest/base/ollama-statefulset.yaml b/kubernetes/manifest/base/ollama-statefulset.yaml new file mode 100644 index 0000000000000000000000000000000000000000..cd1144caf9d506c7cf5922395c9a075621ad0072 --- /dev/null +++ b/kubernetes/manifest/base/ollama-statefulset.yaml @@ -0,0 +1,41 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: ollama + namespace: open-webui +spec: + serviceName: "ollama" + replicas: 1 + selector: + matchLabels: + app: ollama + template: + metadata: + labels: + app: ollama + spec: + containers: + - name: ollama + image: ollama/ollama:latest + ports: + - containerPort: 11434 + resources: + requests: + cpu: "2000m" + memory: "2Gi" + limits: + cpu: "4000m" + memory: "4Gi" + nvidia.com/gpu: "0" + volumeMounts: + - name: ollama-volume + mountPath: /root/.ollama + tty: true + volumeClaimTemplates: + - metadata: + name: ollama-volume + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 30Gi \ No newline at end of file diff --git a/kubernetes/manifest/base/open-webui.yaml b/kubernetes/manifest/base/open-webui.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9c1a599f32690ddd8150be77b9bd8a7cd1b14245 --- /dev/null +++ b/kubernetes/manifest/base/open-webui.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: open-webui \ No newline at end of file diff --git a/kubernetes/manifest/base/webui-deployment.yaml b/kubernetes/manifest/base/webui-deployment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..79a0a9a23c9652bf755273e7801e3002499a5aa6 --- /dev/null +++ b/kubernetes/manifest/base/webui-deployment.yaml @@ -0,0 +1,38 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: open-webui-deployment + namespace: open-webui +spec: + replicas: 1 + selector: + matchLabels: + app: open-webui + template: + metadata: + labels: + app: open-webui + spec: + containers: + - name: open-webui + image: ghcr.io/open-webui/open-webui:main + ports: + - containerPort: 8080 + resources: + requests: + cpu: "500m" + memory: "500Mi" + limits: + cpu: "1000m" + memory: "1Gi" + env: + - name: OLLAMA_BASE_URL + value: "http://ollama-service.open-webui.svc.cluster.local:11434" + tty: true + volumeMounts: + - name: webui-volume + mountPath: /app/backend/data + volumes: + - name: webui-volume + persistentVolumeClaim: + claimName: open-webui-pvc \ No newline at end of file diff --git a/kubernetes/manifest/base/webui-ingress.yaml b/kubernetes/manifest/base/webui-ingress.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dc0b53ccd456e70910683d28bb117a0272992e1c --- /dev/null +++ b/kubernetes/manifest/base/webui-ingress.yaml @@ -0,0 +1,20 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: open-webui-ingress + namespace: open-webui + #annotations: + # Use appropriate annotations for your Ingress controller, e.g., for NGINX: + # nginx.ingress.kubernetes.io/rewrite-target: / +spec: + rules: + - host: 
open-webui.minikube.local + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: open-webui-service + port: + number: 8080 diff --git a/kubernetes/manifest/base/webui-pvc.yaml b/kubernetes/manifest/base/webui-pvc.yaml new file mode 100644 index 0000000000000000000000000000000000000000..97fb761d422d510a2b725a08c3c52dca53c43ccd --- /dev/null +++ b/kubernetes/manifest/base/webui-pvc.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + app: open-webui + name: open-webui-pvc + namespace: open-webui +spec: + accessModes: ["ReadWriteOnce"] + resources: + requests: + storage: 2Gi \ No newline at end of file diff --git a/kubernetes/manifest/base/webui-service.yaml b/kubernetes/manifest/base/webui-service.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d73845f00a8ec45bc39f629b819542e7d2ced84e --- /dev/null +++ b/kubernetes/manifest/base/webui-service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: open-webui-service + namespace: open-webui +spec: + type: NodePort # Use LoadBalancer if you're on a cloud that supports it + selector: + app: open-webui + ports: + - protocol: TCP + port: 8080 + targetPort: 8080 + # If using NodePort, you can optionally specify the nodePort: + # nodePort: 30000 \ No newline at end of file diff --git a/kubernetes/manifest/kustomization.yaml b/kubernetes/manifest/kustomization.yaml new file mode 100644 index 0000000000000000000000000000000000000000..907bff3e1858858d74ad69f3f5a0290fbaed4108 --- /dev/null +++ b/kubernetes/manifest/kustomization.yaml @@ -0,0 +1,13 @@ +resources: +- base/open-webui.yaml +- base/ollama-service.yaml +- base/ollama-statefulset.yaml +- base/webui-deployment.yaml +- base/webui-service.yaml +- base/webui-ingress.yaml +- base/webui-pvc.yaml + +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +patches: +- path: patches/ollama-statefulset-gpu.yaml diff --git a/kubernetes/manifest/patches/ollama-statefulset-gpu.yaml b/kubernetes/manifest/patches/ollama-statefulset-gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3e42443656d06c6df25075d094e5c8efb180fda1 --- /dev/null +++ b/kubernetes/manifest/patches/ollama-statefulset-gpu.yaml @@ -0,0 +1,17 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: ollama + namespace: open-webui +spec: + selector: + matchLabels: + app: ollama + serviceName: "ollama" + template: + spec: + containers: + - name: ollama + resources: + limits: + nvidia.com/gpu: "1" diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000000000000000000000000000000000000..cf04da5c626b044ce4b7055c418928284803db27 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,10459 @@ +{ + "name": "open-webui", + "version": "0.3.10", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "open-webui", + "version": "0.3.10", + "dependencies": { + "@codemirror/lang-javascript": "^6.2.2", + "@codemirror/lang-python": "^6.1.6", + "@codemirror/theme-one-dark": "^6.1.2", + "@pyscript/core": "^0.4.32", + "@sveltejs/adapter-node": "^1.3.1", + "async": "^3.2.5", + "bits-ui": "^0.19.7", + "codemirror": "^6.0.1", + "crc-32": "^1.2.2", + "dayjs": "^1.11.10", + "eventsource-parser": "^1.1.2", + "file-saver": "^2.0.5", + "highlight.js": "^11.9.0", + "i18next": "^23.10.0", + "i18next-browser-languagedetector": "^7.2.0", + "i18next-resources-to-backend": "^1.2.0", + "idb": "^7.1.1", + "js-sha256": "^0.10.1", + "katex": "^0.16.9", + "marked": "^9.1.0", + "mermaid": 
"^10.9.1", + "pyodide": "^0.26.1", + "socket.io-client": "^4.2.0", + "sortablejs": "^1.15.2", + "svelte-sonner": "^0.3.19", + "tippy.js": "^6.3.7", + "turndown": "^7.2.0", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^2.0.0", + "@sveltejs/adapter-static": "^2.0.3", + "@sveltejs/kit": "^1.30.0", + "@tailwindcss/typography": "^0.5.10", + "@types/bun": "latest", + "@typescript-eslint/eslint-plugin": "^6.17.0", + "@typescript-eslint/parser": "^6.17.0", + "autoprefixer": "^10.4.16", + "cypress": "^13.8.1", + "eslint": "^8.56.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-cypress": "^3.0.2", + "eslint-plugin-svelte": "^2.30.0", + "i18next-parser": "^8.13.0", + "postcss": "^8.4.31", + "prettier": "^2.8.0", + "prettier-plugin-svelte": "^2.10.1", + "svelte": "^4.0.5", + "svelte-check": "^3.4.3", + "svelte-confetti": "^1.3.2", + "tailwindcss": "^3.3.3", + "tslib": "^2.4.1", + "typescript": "^5.0.0", + "vite": "^4.4.2", + "vitest": "^1.6.0" + } + }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.1.tgz", + "integrity": "sha512-+BIznRzyqBf+2wCTxcKE3wDjfGeCoVE61KSHGpkzqrLi8qxqFwBeUFyId2cxkTmm55fzDGnm0+yCxaxygrLUnQ==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@braintree/sanitize-url": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", + "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==" + }, + "node_modules/@codemirror/autocomplete": { + "version": "6.16.2", + "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.16.2.tgz", + "integrity": "sha512-MjfDrHy0gHKlPWsvSsikhO1+BOh+eBHNgfH1OXs1+DAf30IonQldgMM3kxLDTG9ktE7kDLaA1j/l7KMPA4KNfw==", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + }, + "peerDependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/commands": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.6.0.tgz", + "integrity": 
"sha512-qnY+b7j1UNcTS31Eenuc/5YJB6gQOzkUoNmJQc0rznwqSRpeaWWpjkWy2C/MPTcePpsKJEM26hXrOXl1+nceXg==", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.27.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/lang-javascript": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-javascript/-/lang-javascript-6.2.2.tgz", + "integrity": "sha512-VGQfY+FCc285AhWuwjYxQyUQcYurWlxdKYT4bqwr3Twnd5wP5WSeu52t4tvvuWmljT4EmgEgZCqSieokhtY8hg==", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.6.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0", + "@lezer/javascript": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-python": { + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/@codemirror/lang-python/-/lang-python-6.1.6.tgz", + "integrity": "sha512-ai+01WfZhWqM92UqjnvorkxosZ2aq2u28kHvr+N3gu012XqY2CThD67JPMHnGceRfXPDBmn1HnyqowdpF57bNg==", + "dependencies": { + "@codemirror/autocomplete": "^6.3.2", + "@codemirror/language": "^6.8.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/python": "^1.1.4" + } + }, + "node_modules/@codemirror/language": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.2.tgz", + "integrity": "sha512-kgbTYTo0Au6dCSc/TFy7fK3fpJmgHDv1sG1KNQKJXVi+xBTEeBPY/M30YXiU6mMXeH+YIDLsbrT4ZwNRdtF+SA==", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.1.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.0.tgz", + "integrity": "sha512-lsFofvaw0lnPRJlQylNsC4IRt/1lI4OD/yYslrSGVndOJfStc58v+8p9dgGiD90ktOfL7OhBWns1ZETYgz0EJA==", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/search": { + "version": "6.5.6", + "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.6.tgz", + "integrity": "sha512-rpMgcsh7o0GuCDUXKPvww+muLA1pDJaFrpq/CCHtpQJYz8xopu4D1hPcKRoDD0YlF8gZaqTNIRa4VRBWyhyy7Q==", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==" + }, + "node_modules/@codemirror/theme-one-dark": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.2.tgz", + "integrity": "sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.28.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.28.0.tgz", + "integrity": "sha512-fo7CelaUDKWIyemw4b+J57cWuRkOu4SWCCPfNDkPvfWkGjM9D5racHQXr4EQeYCD6zEBIBxGCeaKkQo+ysl0gA==", + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + 
"node_modules/@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@cypress/request": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", + "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", + "dev": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "6.10.4", + "safe-buffer": "^5.1.2", + "tough-cookie": "^4.1.3", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@cypress/request/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + "dependencies": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + } + }, + "node_modules/@cypress/xvfb/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": 
"sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + 
"integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + 
"integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@floating-ui/core": { 
+ "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.0.tgz", + "integrity": "sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==", + "dependencies": { + "@floating-ui/utils": "^0.2.1" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", + "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", + "dependencies": { + "@floating-ui/core": "^1.0.0", + "@floating-ui/utils": "^0.2.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", + "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" + }, + "node_modules/@gulpjs/to-absolute-glob": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@gulpjs/to-absolute-glob/-/to-absolute-glob-4.0.0.tgz", + "integrity": "sha512-kjotm7XJrJ6v+7knhPaRgaT6q8F8K2jiafwYdNHLzmV0uGLuZY43FK6smNSHUPrhq5kX2slCUy+RGG/xGqmIKA==", + "dev": true, + "dependencies": { + "is-negated-glob": "^1.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", + "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", + "dev": true + }, + "node_modules/@internationalized/date": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.5.2.tgz", + "integrity": "sha512-vo1yOMUt2hzp63IutEaTUxROdvQg1qlMRsbCvbay2AK2Gai7wIgCyK5weEX3nHkiLgo4qCXHijFNC/ILhlRpOQ==", + "dependencies": { + 
"@swc/helpers": "^0.5.0" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + 
"@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@lezer/common": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", + "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==" + }, + "node_modules/@lezer/highlight": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.0.tgz", + "integrity": "sha512-WrS5Mw51sGrpqjlh3d4/fOwpEV2Hd3YOkp9DBt4k8XZQcoTHZFB7sx030A6OcahF4J1nDQAa3jXlTVVYH50IFA==", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/javascript": { + "version": "1.4.16", + "resolved": "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.4.16.tgz", + "integrity": "sha512-84UXR3N7s11MPQHWgMnjb9571fr19MmXnr5zTv2XX0gHXXUvW3uPJ8GCjKrfTXmSdfktjRK0ayKklw+A13rk4g==", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.1.3", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.1.tgz", + "integrity": "sha512-CHsKq8DMKBf9b3yXPDIU4DbH+ZJd/sJdYOW2llbW/HudP5u0VS6Bfq1hLYfgU7uAYGFIyGGQIsSOXGPEErZiJw==", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/python": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/@lezer/python/-/python-1.1.14.tgz", + "integrity": "sha512-ykDOb2Ti24n76PJsSa4ZoDF0zH12BSw1LGfQXCYJhJyOGiFTfGaX0Du66Ze72R+u/P35U+O6I9m8TFXov1JzsA==", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@melt-ui/svelte": { + "version": "0.76.0", + "resolved": "https://registry.npmjs.org/@melt-ui/svelte/-/svelte-0.76.0.tgz", + "integrity": "sha512-X1ktxKujjLjOBt8LBvfckHGDMrkHWceRt1jdsUTf0EH76ikNPP1ofSoiV0IhlduDoCBV+2YchJ8kXCDfDXfC9Q==", + "dependencies": { + "@floating-ui/core": "^1.3.1", + "@floating-ui/dom": "^1.4.5", + "@internationalized/date": "^3.5.0", + "dequal": "^2.0.3", + "focus-trap": "^7.5.2", + "nanoid": "^5.0.4" + }, + "peerDependencies": { + "svelte": ">=3 <5" + } + }, + "node_modules/@mixmark-io/domino": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@mixmark-io/domino/-/domino-2.2.0.tgz", + "integrity": "sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw==" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + 
"version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.25", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.25.tgz", + "integrity": "sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==" + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/@pyscript/core": { + "version": "0.4.32", + "resolved": "https://registry.npmjs.org/@pyscript/core/-/core-0.4.32.tgz", + "integrity": "sha512-WQATzPp1ggf871+PukCmTypzScXkEB1EWD/vg5GNxpM96N6rDPqQ13msuA5XvwU01ZVhL8HHSFDLk4IfaXNGWg==", + "dependencies": { + "@ungap/with-resolvers": "^0.1.0", + "basic-devtools": "^0.1.6", + "polyscript": "^0.12.8", + "sticky-module": "^0.1.1", + "to-json-callback": "^0.1.1", + "type-checked-collections": "^0.1.7" + } + }, + "node_modules/@rollup/plugin-commonjs": { + "version": "25.0.7", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.7.tgz", + "integrity": "sha512-nEvcR+LRjEjsaSsc4x3XZfCCvZIaSMenZu/OiwOKGN2UhQpAYI7ru7czFvyWbErlpoGjnSX3D5Ch5FcMA3kRWQ==", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "commondir": "^1.0.1", + "estree-walker": "^2.0.2", + "glob": "^8.0.3", + "is-reference": "1.2.1", + "magic-string": "^0.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.68.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-json": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", + "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", + "dependencies": { + "@rollup/pluginutils": "^5.1.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "15.2.3", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz", + "integrity": "sha512-j/lym8nf5E21LwBT4Df1VD6hRO2L2iwUeUmP7litikRsVp1H6NWx20NEp0Y7su+7XGc476GnXXc4kFeZNGmaSQ==", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-builtin-module": "^3.2.1", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.0.tgz", + "integrity": "sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + 
"picomatch": "^2.3.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.17.2.tgz", + "integrity": "sha512-NM0jFxY8bB8QLkoKxIQeObCaDlJKewVlIEkuyYKm5An1tdVZ966w2+MPQ2l8LBZLjR+SgyV+nRkTIunzOYBMLQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.17.2.tgz", + "integrity": "sha512-yeX/Usk7daNIVwkq2uGoq2BYJKZY1JfyLTaHO/jaiSwi/lsf8fTFoQW/n6IdAsx5tx+iotu2zCJwz8MxI6D/Bw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.17.2.tgz", + "integrity": "sha512-kcMLpE6uCwls023+kknm71ug7MZOrtXo+y5p/tsg6jltpDtgQY1Eq5sGfHcQfb+lfuKwhBmEURDga9N0ol4YPw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.17.2.tgz", + "integrity": "sha512-AtKwD0VEx0zWkL0ZjixEkp5tbNLzX+FCqGG1SvOu993HnSz4qDI6S4kGzubrEJAljpVkhRSlg5bzpV//E6ysTQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.17.2.tgz", + "integrity": "sha512-3reX2fUHqN7sffBNqmEyMQVj/CKhIHZd4y631duy0hZqI8Qoqf6lTtmAKvJFYa6bhU95B1D0WgzHkmTg33In0A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.17.2.tgz", + "integrity": "sha512-uSqpsp91mheRgw96xtyAGP9FW5ChctTFEoXP0r5FAzj/3ZRv3Uxjtc7taRQSaQM/q85KEKjKsZuiZM3GyUivRg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.17.2.tgz", + "integrity": "sha512-EMMPHkiCRtE8Wdk3Qhtciq6BndLtstqZIroHiiGzB3C5LDJmIZcSzVtLRbwuXuUft1Cnv+9fxuDtDxz3k3EW2A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.17.2.tgz", + "integrity": "sha512-NMPylUUZ1i0z/xJUIx6VUhISZDRT+uTWpBcjdv0/zkp7b/bQDF+NfnfdzuTiB1G6HTodgoFa93hp0O1xl+/UbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.17.2.tgz", + "integrity": 
"sha512-T19My13y8uYXPw/L/k0JYaX1fJKFT/PWdXiHr8mTbXWxjVF1t+8Xl31DgBBvEKclw+1b00Chg0hxE2O7bTG7GQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.17.2.tgz", + "integrity": "sha512-BOaNfthf3X3fOWAB+IJ9kxTgPmMqPPH5f5k2DcCsRrBIbWnaJCgX2ll77dV1TdSy9SaXTR5iDXRL8n7AnoP5cg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.17.2.tgz", + "integrity": "sha512-W0UP/x7bnn3xN2eYMql2T/+wpASLE5SjObXILTMPUBDB/Fg/FxC+gX4nvCfPBCbNhz51C+HcqQp2qQ4u25ok6g==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.17.2.tgz", + "integrity": "sha512-Hy7pLwByUOuyaFC6mAr7m+oMC+V7qyifzs/nW2OJfC8H4hbCzOX07Ov0VFk/zP3kBsELWNFi7rJtgbKYsav9QQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.17.2.tgz", + "integrity": "sha512-h1+yTWeYbRdAyJ/jMiVw0l6fOOm/0D1vNLui9iPuqgRGnXA0u21gAqOyB5iHjlM9MMfNOm9RHCQ7zLIzT0x11Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.17.2.tgz", + "integrity": "sha512-tmdtXMfKAjy5+IQsVtDiCfqbynAQE/TQRpWdVataHmhMb9DCoJxp9vLcCBjEQWMiUYxO1QprH/HbY9ragCEFLA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.17.2.tgz", + "integrity": "sha512-7II/QCSTAHuE5vdZaQEwJq2ZACkBpQDOmQsE6D6XUbnBHW8IAhm4eTufL6msLJorzrHDFv3CF8oCA/hSIRuZeQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.17.2.tgz", + "integrity": "sha512-TGGO7v7qOq4CYmSBVEYpI1Y5xDuCEnbVC5Vth8mOsW0gDSzxNrVERPc790IGHsrT2dQSimgMr9Ub3Y1Jci5/8w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, + "node_modules/@socket.io/component-emitter": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", + "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==" + }, + "node_modules/@sveltejs/adapter-auto": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/@sveltejs/adapter-auto/-/adapter-auto-2.1.1.tgz", + "integrity": "sha512-nzi6x/7/3Axh5VKQ8Eed3pYxastxoa06Y/bFhWb7h3Nu+nGRVxKAy3+hBJgmPCwWScy8n0TsstZjSVKfyrIHkg==", + "dev": true, + "dependencies": { + "import-meta-resolve": "^4.0.0" + }, + "peerDependencies": { + "@sveltejs/kit": "^1.0.0" + } + }, + "node_modules/@sveltejs/adapter-node": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-1.3.1.tgz", + "integrity": "sha512-A0VgRQDCDPzdLNoiAbcOxGw4zT1Mc+n1LwT1OmO350R7WxrEqdMUChPPOd1iMfIDWlP4ie6E2d/WQf5es2d4Zw==", + "dependencies": { + "@rollup/plugin-commonjs": "^25.0.0", + "@rollup/plugin-json": "^6.0.0", + "@rollup/plugin-node-resolve": "^15.0.1", + "rollup": "^3.7.0" + }, + "peerDependencies": { + "@sveltejs/kit": "^1.0.0" + } + }, + "node_modules/@sveltejs/adapter-static": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-2.0.3.tgz", + "integrity": "sha512-VUqTfXsxYGugCpMqQv1U0LIdbR3S5nBkMMDmpjGVJyM6Q2jHVMFtdWJCkeHMySc6mZxJ+0eZK3T7IgmUCDrcUQ==", + "dev": true, + "peerDependencies": { + "@sveltejs/kit": "^1.5.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "1.30.4", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-1.30.4.tgz", + "integrity": "sha512-JSQIQT6XvdchCRQEm7BABxPC56WP5RYVONAi+09S8tmzeP43fBsRlr95bFmsTQM2RHBldfgQk+jgdnsKI75daA==", + "hasInstallScript": true, + "dependencies": { + "@sveltejs/vite-plugin-svelte": "^2.5.0", + "@types/cookie": "^0.5.1", + "cookie": "^0.5.0", + "devalue": "^4.3.1", + "esm-env": "^1.0.0", + "kleur": "^4.1.5", + "magic-string": "^0.30.0", + "mrmime": "^1.0.1", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^2.0.2", + "tiny-glob": "^0.2.9", + "undici": "^5.28.3" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": "^16.14 || >=18" + }, + "peerDependencies": { + "svelte": "^3.54.0 || ^4.0.0-next.0 || ^5.0.0-next.0", + "vite": "^4.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-2.5.3.tgz", + "integrity": "sha512-erhNtXxE5/6xGZz/M9eXsmI7Pxa6MS7jyTy06zN3Ck++ldrppOnOlJwHHTsMC7DHDQdgUp4NAc4cDNQ9eGdB/w==", + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^1.0.4", + "debug": "^4.3.4", + "deepmerge": "^4.3.1", + "kleur": "^4.1.5", + "magic-string": "^0.30.3", + "svelte-hmr": "^0.15.3", + "vitefu": "^0.2.4" + }, + "engines": { + "node": "^14.18.0 || >= 16" + }, + "peerDependencies": { + "svelte": "^3.54.0 || ^4.0.0 || ^5.0.0-next.0", + "vite": "^4.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-1.0.4.tgz", + "integrity": "sha512-zjiuZ3yydBtwpF3bj0kQNV0YXe+iKE545QGZVTaylW3eAzFr+pJ/cwK8lZEaRp4JtaJXhD5DyWAV4AxLh6DgaQ==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": "^14.18.0 || >= 16" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^2.2.0", + "svelte": "^3.54.0 || ^4.0.0", + "vite": "^4.0.0" + } + }, + "node_modules/@swc/helpers": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.7.tgz", + "integrity": "sha512-BVvNZhx362+l2tSwSuyEUV4h7+jk9raNdoTSdLfwTshXJSaGmYKluGRJznziCI3KX02Z19DdsQrdfrpXAU3Hfg==", + "dependencies": { + "tslib": "^2.4.0" + } + }, + 
"node_modules/@tailwindcss/typography": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.10.tgz", + "integrity": "sha512-Pe8BuPJQJd3FfRnm6H0ulKIGoMEQS+Vq01R6M5aCrFB/ccR/shT+0kXLjouGC1gFLm9hopTFN+DMP0pfwRWzPw==", + "dev": true, + "dependencies": { + "lodash.castarray": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.merge": "^4.6.2", + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders" + } + }, + "node_modules/@types/bun": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/@types/bun/-/bun-1.0.10.tgz", + "integrity": "sha512-Jaz6YYAdm1u3NVlgSyEK+qGmrlLQ20sbWeEoXD64b9w6z/YKYNWlfaphu+xF2Kiy5Tpykm5Q9jIquLegwXx4ng==", + "dev": true, + "dependencies": { + "bun-types": "1.0.33" + } + }, + "node_modules/@types/cookie": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.5.4.tgz", + "integrity": "sha512-7z/eR6O859gyWIAjuvBWFzNURmf2oPBmJlfVWkwehU5nzIyjwBsTh7WMmEEV4JFnHuQ3ex4oyTvfKzcyJVDBNA==" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz", + "integrity": "sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==" + }, + "node_modules/@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==" + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@types/minimatch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==", + "dev": true + }, + "node_modules/@types/ms": { + "version": "0.7.34", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", + "integrity": 
"sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" + }, + "node_modules/@types/node": { + "version": "20.11.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz", + "integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==", + "devOptional": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/pug": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/pug/-/pug-2.0.10.tgz", + "integrity": "sha512-Sk/uYFOBAB7mb74XcpizmH0KOR2Pv3D2Hmrh1Dmy5BmK3MpdSa5kqZcg6EKBdklU0bFXX9gCfzvpnyUehrPIuA==", + "dev": true + }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==" + }, + "node_modules/@types/semver": { + "version": "7.5.8", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", + "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", + "dev": true + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", + "dev": true + }, + "node_modules/@types/sizzle": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz", + "integrity": "sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg==", + "dev": true + }, + "node_modules/@types/symlink-or-copy": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@types/symlink-or-copy/-/symlink-or-copy-1.2.2.tgz", + "integrity": "sha512-MQ1AnmTLOncwEf9IVU+B2e4Hchrku5N67NkgcAHW0p3sdzPe0FNMANxEm6OJUzPniEQGkeT3OROLlCwZJLWFZA==", + "dev": true + }, + "node_modules/@types/unist": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", + "integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==" + }, + "node_modules/@types/ws": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/yauzl": { + "version": "2.10.3", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", + "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", + "dev": true, + "optional": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz", + "integrity": "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "6.21.0", + "@typescript-eslint/type-utils": "6.21.0", + "@typescript-eslint/utils": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4", + "graphemer": 
"^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz", + "integrity": "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "6.21.0", + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/typescript-estree": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz", + "integrity": "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.21.0.tgz", + "integrity": "sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "6.21.0", + "@typescript-eslint/utils": "6.21.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.21.0.tgz", + "integrity": "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz", + "integrity": "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": 
"^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.21.0.tgz", + "integrity": "sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.21.0", + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/typescript-estree": "6.21.0", + "semver": "^7.5.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz", + "integrity": "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==" + }, + "node_modules/@ungap/with-resolvers": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@ungap/with-resolvers/-/with-resolvers-0.1.0.tgz", + "integrity": "sha512-g7f0IkJdPW2xhY7H4iE72DAsIyfuwEFc6JWc2tYFwKDMWWAF699vGjrM348cwQuOXgHpe1gWFe+Eiyjx/ewvvw==" + }, + "node_modules/@vitest/expect": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-1.6.0.tgz", + "integrity": "sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==", + "dev": true, + "dependencies": { + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "chai": "^4.3.10" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-1.6.0.tgz", + "integrity": "sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==", + "dev": true, + "dependencies": { + "@vitest/utils": "1.6.0", + "p-limit": "^5.0.0", + "pathe": "^1.1.1" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner/node_modules/p-limit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/@vitest/runner/node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@vitest/snapshot": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-1.6.0.tgz", + "integrity": "sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==", + "dev": true, + "dependencies": { + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-1.6.0.tgz", + "integrity": "sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==", + "dev": true, + "dependencies": { + "tinyspy": "^2.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.6.0.tgz", + "integrity": "sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==", + "dev": true, + "dependencies": { + "diff-sequences": "^29.6.3", + "estree-walker": "^3.0.3", + "loupe": "^2.3.7", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/@webreflection/fetch": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@webreflection/fetch/-/fetch-0.1.5.tgz", + "integrity": "sha512-zCcqCJoNLvdeF41asAK71XPlwSPieeRDsE09albBunJEksuYPYNillKNQjf8p5BqSoTKTuKrW3lUm3MNodUC4g==" + }, + "node_modules/acorn": { + "version": "8.11.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", + "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", + "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + 
"clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ] + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dev": true, + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "node_modules/at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": 
"tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", + "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", + "dev": true + }, + "node_modules/axobject-query": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.0.0.tgz", + "integrity": "sha512-+60uv1hiVFhHZeO+Lz0RYzsVHy5Wr1ayX0mwda9KPDVLNJgZ1T9Ny7VmFbLDzxsH0D87I86vgj3gFrjTJUYznw==", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/bare-events": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.2.tgz", + "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==", + "dev": true, + "optional": true + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/basic-devtools": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/basic-devtools/-/basic-devtools-0.1.6.tgz", + "integrity": "sha512-g9zJ63GmdUesS3/Fwv0B5SYX6nR56TQXmGr+wE5PRTNCnGQMYWhUx/nZB/mMWnQJVLPPAp89oxDNlasdtNkW5Q==" + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dev": true, + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bits-ui": { + "version": "0.19.7", + "resolved": "https://registry.npmjs.org/bits-ui/-/bits-ui-0.19.7.tgz", + "integrity": 
"sha512-GHUpKvN7QyazhnZNkUy0lxg6W1M6KJHWSZ4a/UGCjPE6nQgk6vKbGysY67PkDtQMknZTZAzVoMj1Eic4IKeCRQ==", + "dependencies": { + "@internationalized/date": "^3.5.1", + "@melt-ui/svelte": "0.76.0", + "nanoid": "^5.0.5" + }, + "peerDependencies": { + "svelte": "^4.0.0" + } + }, + "node_modules/bl": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", + "integrity": "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", + "dev": true, + "dependencies": { + "buffer": "^6.0.3", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", + "dev": true + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/broccoli-node-api": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/broccoli-node-api/-/broccoli-node-api-1.7.0.tgz", + "integrity": "sha512-QIqLSVJWJUVOhclmkmypJJH9u9s/aWH4+FH6Q6Ju5l+Io4dtwqdPUNmDfw40o6sxhbZHhqGujDJuHTML1wG8Yw==", + "dev": true + }, + "node_modules/broccoli-node-info": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/broccoli-node-info/-/broccoli-node-info-2.2.0.tgz", + "integrity": "sha512-VabSGRpKIzpmC+r+tJueCE5h8k6vON7EIMMWu6d/FyPdtijwLQ7QvzShEw+m3mHoDzUaj/kiZsDYrS8X2adsBg==", + "dev": true, + "engines": { + "node": "8.* || >= 10.*" + } + }, + "node_modules/broccoli-output-wrapper": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/broccoli-output-wrapper/-/broccoli-output-wrapper-3.2.5.tgz", + "integrity": "sha512-bQAtwjSrF4Nu0CK0JOy5OZqw9t5U0zzv2555EA/cF8/a8SLDTIetk9UgrtMVw7qKLKdSpOZ2liZNeZZDaKgayw==", + "dev": true, + "dependencies": { + "fs-extra": "^8.1.0", + "heimdalljs-logger": "^0.1.10", + "symlink-or-copy": "^1.2.0" + }, + "engines": { + "node": "10.* || >= 
12.*" + } + }, + "node_modules/broccoli-output-wrapper/node_modules/fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/broccoli-output-wrapper/node_modules/jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/broccoli-output-wrapper/node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/broccoli-plugin": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/broccoli-plugin/-/broccoli-plugin-4.0.7.tgz", + "integrity": "sha512-a4zUsWtA1uns1K7p9rExYVYG99rdKeGRymW0qOCNkvDPHQxVi3yVyJHhQbM3EZwdt2E0mnhr5e0c/bPpJ7p3Wg==", + "dev": true, + "dependencies": { + "broccoli-node-api": "^1.7.0", + "broccoli-output-wrapper": "^3.2.5", + "fs-merger": "^3.2.1", + "promise-map-series": "^0.3.0", + "quick-temp": "^0.1.8", + "rimraf": "^3.0.2", + "symlink-or-copy": "^1.3.1" + }, + "engines": { + "node": "10.* || >= 12.*" + } + }, + "node_modules/browserslist": { + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.13" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bun-types": { + "version": "1.0.33", + "resolved": "https://registry.npmjs.org/bun-types/-/bun-types-1.0.33.tgz", + "integrity": "sha512-L5tBIf9g6rBBkvshqysi5NoLQ9NnhSPU1pfJ9FzqoSfofYdyac3WLUnOIuQ+M5za/sooVUOP2ko+E6Tco0OLIA==", + "dev": true, + "dependencies": { + "@types/node": "~20.11.3", + "@types/ws": "~8.5.10" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cachedir": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001600", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001600.tgz", + "integrity": "sha512-+2S9/2JFhYmYaDpZvo0lKkfvuKIglrx68MwOBqMGHhQsNkLjB5xtc/TGoEPs+MxjSyN/72qer2g97nzR641mOQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true + }, + "node_modules/chai": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz", + "integrity": "sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": 
"^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.0.8" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/cheerio": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", + "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", + "dev": true, + "dependencies": { + "cheerio-select": "^2.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "htmlparser2": "^8.0.1", + "parse5": "^7.0.0", + "parse5-htmlparser2-tree-adapter": "^7.0.0" + }, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/cheeriojs/cheerio?sponsor=1" + } + }, + "node_modules/cheerio-select": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", + "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-select": "^5.1.0", + "css-what": "^6.1.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.4.tgz", + "integrity": "sha512-Lm3L0p+/npIQWNIiyF/nAn7T5dnOwR3xNTHXYEBFBFVPXzCVNZ5lqEC/1eo/EVfpDsQ1I+TX4ORPQgp+UI0CRw==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + "@colors/colors": "1.5.0" + } + }, + "node_modules/cli-table3/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cli-table3/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cli-truncate/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": 
"^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-stats": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", + "integrity": "sha512-au6ydSpg6nsrigcZ4m8Bc9hxjeW+GJ8xh5G3BJCMt4WXe1H10UNaVOamqQTmrx1kjVuxAHIQSNU6hY4Nsn9/ag==", + "dev": true + }, + "node_modules/code-red": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz", + "integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15", + "@types/estree": "^1.0.1", + "acorn": "^8.10.0", + "estree-walker": "^3.0.3", + "periscopic": "^3.1.0" + } + }, + "node_modules/code-red/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/codedent": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/codedent/-/codedent-0.1.2.tgz", + "integrity": "sha512-qEqzcy5viM3UoCN0jYHZeXZoyd4NZQzYFg0kOBj8O1CgoGG9WYYTF+VeQRsN0OSKFjF3G1u4WDUOtOsWEx6N2w==", + "dependencies": { + "plain-tag": "^0.1.3" + } + }, + "node_modules/codemirror": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz", + "integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/commands": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/search": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, + "node_modules/coincident": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/coincident/-/coincident-1.2.3.tgz", + "integrity": "sha512-Uxz3BMTWIslzeWjuQnizGWVg0j6khbvHUQ8+5BdM7WuJEm4ALXwq3wluYoB+uF68uPBz/oUOeJnYURKyfjexlA==", + "dependencies": { + "@ungap/structured-clone": "^1.2.0", + "@ungap/with-resolvers": "^0.1.0", + "gc-hook": "^0.3.1", + "proxy-target": "^3.0.2" + }, + "optionalDependencies": { + "ws": "^8.16.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "node_modules/colors": { + "version": 
"1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", + "integrity": "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/confbox": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.7.tgz", + "integrity": "sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "dependencies": { + "layout-base": "^1.0.0" + } + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crelt": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==" + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "dev": true, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/cypress": { + "version": "13.8.1", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.8.1.tgz", + "integrity": "sha512-Uk6ovhRbTg6FmXjeZW/TkbRM07KPtvM5gah1BIMp4Y2s+i/NMxgaLw0+PbYTOdw1+egE0FP3mWRiGcRkjjmhzA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@cypress/request": "^3.0.0", + "@cypress/xvfb": "^1.2.4", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": "^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.7.1", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^6.2.1", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.4", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.1", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.8", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "process": "^0.11.10", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.5.3", + "supports-color": "^8.1.1", + "tmp": "~0.2.1", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + }, + "bin": { + "cypress": "bin/cypress" + }, + 
"engines": { + "node": "^16.0.0 || ^18.0.0 || >=20.0.0" + } + }, + "node_modules/cypress/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/cypress/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/cypress/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cypress/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/cytoscape": { + "version": "3.29.2", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.29.2.tgz", + "integrity": "sha512-2G1ycU28Nh7OHT9rkXRLpCDP30MKH1dXJORZuBhtEhEW7pKwgPi77ImqlCWinouyE1PNepIOGZBOrE84DG7LyQ==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "dependencies": { + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": 
"https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + 
"node_modules/d3-dsv/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": 
"sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "dependencies": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "dependencies": { + "internmap": "^1.0.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==" + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==" + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": 
"sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dagre-d3-es": { + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.10.tgz", + "integrity": "sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==", + "dependencies": { + "d3": "^7.8.2", + "lodash-es": "^4.17.21" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/dayjs": { + "version": "1.11.10", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", + "integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==" + }, + "node_modules/de-indent": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz", + "integrity": "sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + 
"integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/devalue": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-4.3.2.tgz", + "integrity": "sha512-KqFl6pOgOW+Y6wJgu80rHpo2/3H07vr8ntR9rkkFIRETewbf5GaYYcakYfiKz89K+sLsuPkQIZaXDMjUObZwWg==" + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || 
^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/dompurify": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.5.tgz", + "integrity": "sha512-lwG+n5h8QNpxtyrJW/gJWckL+1/DQiYMX8f7t8Z2AZTPw1esVrqjI63i7Zc2Gz0aKzLVMYC1V1PL/ky+aY/NgA==" + }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dev": true, + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + 
"node_modules/electron-to-chromium": { + "version": "1.4.715", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.715.tgz", + "integrity": "sha512-XzWNH4ZSa9BwVUQSDorPWAUQ5WGuYz7zJUNpNif40zFCiCl20t8zgylmreNmn26h5kiyw2lg7RfTmeMBsDklqg==", + "dev": true + }, + "node_modules/elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/engine.io-client": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.4.tgz", + "integrity": "sha512-GeZeeRjpD2qf49cZQ0Wvh/8NJNfeXkXXcoGh+F77oEAgo9gUHwT1fCRxSNU+YEEaysOJTnsFHmM5oAcPy4ntvQ==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.1", + "engine.io-parser": "~5.2.1", + "ws": "~8.17.1", + "xmlhttprequest-ssl": "~2.0.0" + } + }, + "node_modules/engine.io-parser": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.2.tgz", + "integrity": "sha512-RcyUFKA93/CXH20l4SoVvzZfrSDMOTUS3bWVpTt2FuFP+XYrL8i8oonHP7WInRyVHXh0n/ORtoeiE1os+8qkSw==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/enquirer": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.4.1.tgz", + "integrity": "sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/ensure-posix-path": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ensure-posix-path/-/ensure-posix-path-1.1.1.tgz", + "integrity": "sha512-VWU0/zXzVbeJNXvME/5EmLuEj2TauvoaTz6aFYK1Z92JCBlDlZ3Gu0tuGR42kpW1754ywTs+QB0g5TP0oj9Zaw==", + "dev": true + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/eol": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/eol/-/eol-0.9.1.tgz", + "integrity": "sha512-Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg==", + "dev": true + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + 
"version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es6-promise": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==", + "dev": true + }, + "node_modules/esbuild": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + 
"is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-compat-utils": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.1.2.tgz", + "integrity": "sha512-Jia4JDldWnFNIru1Ehx1H5s9/yxiRHY/TimCuUc0jNexew3cF1gI6CYZil1ociakfWO3rRqFjl1mskBblB3RYg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, + "node_modules/eslint-config-prettier": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz", + "integrity": "sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-cypress": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-cypress/-/eslint-plugin-cypress-3.0.2.tgz", + "integrity": "sha512-5hIWc3SqXSuR+Sd7gmNMzx8yJ3LWQQS0e+qLvEVF4C1JfFtu1s9imtEm1KxlCBCcKb7+6CyR9KQYs0GiI02AlA==", + "dev": true, + "dependencies": { + "globals": "^13.20.0" + }, + "peerDependencies": { + "eslint": ">=7 <9" + } + }, + "node_modules/eslint-plugin-svelte": { + "version": "2.35.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-2.35.1.tgz", + "integrity": "sha512-IF8TpLnROSGy98Z3NrsKXWDSCbNY2ReHDcrYTuXZMbfX7VmESISR78TWgO9zdg4Dht1X8coub5jKwHzP0ExRug==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@jridgewell/sourcemap-codec": "^1.4.14", + "debug": "^4.3.1", + "eslint-compat-utils": "^0.1.2", + "esutils": "^2.0.3", + "known-css-properties": "^0.29.0", + "postcss": "^8.4.5", + "postcss-load-config": "^3.1.4", + "postcss-safe-parser": "^6.0.0", + "postcss-selector-parser": "^6.0.11", + "semver": "^7.5.3", + "svelte-eslint-parser": ">=0.33.0 <1.0.0" + }, + "engines": { + "node": "^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0-0", + "svelte": "^3.37.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-svelte/node_modules/postcss-selector-parser": { + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/esm-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.0.0.tgz", + "integrity": "sha512-Cf6VksWPsTuW01vU9Mk/3vRue91Zevka5SjyNf3nEpokFRuqt/KjUQoGAwq9qMmhpLTHmXzSIrFRw8zxWzmFBA==" + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==" + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventemitter2": { + "version": "6.4.7", + 
"resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==", + "dev": true + }, + "node_modules/eventsource-parser": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz", + "integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==", + "engines": { + "node": ">=14.18" + } + }, + "node_modules/execa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/execa/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "dev": true, + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": 
"sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/file-saver": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz", + "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==" + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + 
"dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", + "dev": true + }, + "node_modules/focus-trap": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.5.4.tgz", + "integrity": "sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==", + "dependencies": { + "tabbable": "^6.2.0" + } + }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "dev": true, + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs-extra": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fs-merger": { + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/fs-merger/-/fs-merger-3.2.1.tgz", + "integrity": "sha512-AN6sX12liy0JE7C2evclwoo0aCG3PFulLjrTLsJpWh/2mM+DinhpSGqYLbHBBbIW1PLRNcFhJG8Axtz8mQW3ug==", + "dev": true, + "dependencies": { + "broccoli-node-api": "^1.7.0", + "broccoli-node-info": "^2.1.0", + "fs-extra": "^8.0.1", + "fs-tree-diff": "^2.0.1", + "walk-sync": "^2.2.0" + } + }, + "node_modules/fs-merger/node_modules/fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/fs-merger/node_modules/jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/fs-merger/node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/fs-mkdirp-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-2.0.1.tgz", + "integrity": "sha512-UTOY+59K6IA94tec8Wjqm0FSh5OVudGNB0NL/P6fB3HiE3bYOY3VYBGijsnOHNkQSwC1FKkU77pmq7xp9CskLw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.8", + "streamx": "^2.12.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/fs-tree-diff": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fs-tree-diff/-/fs-tree-diff-2.0.1.tgz", + "integrity": "sha512-x+CfAZ/lJHQqwlD64pYM5QxWjzWhSjroaVsr8PW831zOApL55qPibed0c+xebaLWVr2BnHFoHdrwOv8pzt8R5A==", + "dev": true, + "dependencies": { + "@types/symlink-or-copy": "^1.2.0", + "heimdalljs-logger": "^0.1.7", + "object-assign": "^4.1.0", + "path-posix": "^1.0.0", + "symlink-or-copy": "^1.1.8" + }, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gc-hook": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/gc-hook/-/gc-hook-0.3.1.tgz", + "integrity": 
"sha512-E5M+O/h2o7eZzGhzRZGex6hbB3k4NWqO0eA+OzLRLXxhdbYPajZnynPwAtphnh+cRHPwsj5Z80dqZlfI4eK55A==" + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/getos": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dev": true, + "dependencies": { + "async": "^3.2.0" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob-stream": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-8.0.0.tgz", + "integrity": "sha512-CdIUuwOkYNv9ZadR3jJvap8CMooKziQZ/QCSPhEb7zqfsEI5YnPmvca7IvbaVE3z58ZdUYD2JsU6AUWjL8WZJA==", + "dev": true, + "dependencies": { + "@gulpjs/to-absolute-glob": "^4.0.0", + "anymatch": "^3.1.3", + "fastq": "^1.13.0", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "is-negated-glob": "^1.0.0", + "normalize-path": "^3.0.0", + "streamx": "^2.12.5" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": 
"sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/global-dirs": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", + "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", + "dev": true, + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==" + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globrex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==" + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/gulp-sort": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/gulp-sort/-/gulp-sort-2.0.0.tgz", + "integrity": "sha512-MyTel3FXOdh1qhw1yKhpimQrAmur9q1X0ZigLmCOxouQD+BD3za9/89O+HfbgBQvvh4igEbp0/PUWO+VqGYG1g==", + "dev": true, + "dependencies": { + "through2": "^2.0.1" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/heimdalljs": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/heimdalljs/-/heimdalljs-0.2.6.tgz", + "integrity": "sha512-o9bd30+5vLBvBtzCPwwGqpry2+n0Hi6H1+qwt6y+0kwRHGGF8TFIhJPmnuM0xO97zaKrDZMwO/V56fAnn8m/tA==", + "dev": true, + "dependencies": { + "rsvp": "~3.2.1" + } + }, + "node_modules/heimdalljs-logger": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/heimdalljs-logger/-/heimdalljs-logger-0.1.10.tgz", + "integrity": "sha512-pO++cJbhIufVI/fmB/u2Yty3KJD0TqNPecehFae0/eps0hkZ3b4Zc/PezUMOpYuHFQbA7FxHZxa305EhmjLj4g==", + "dev": true, + "dependencies": { + "debug": "^2.2.0", + "heimdalljs": "^0.2.6" + } + }, + "node_modules/heimdalljs-logger/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/heimdalljs-logger/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/heimdalljs/node_modules/rsvp": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-3.2.1.tgz", + "integrity": "sha512-Rf4YVNYpKjZ6ASAmibcwTNciQ5Co5Ztq6iZPEykHpkoflnD/K5ryE/rHehFsTm4NJj8nKDhbi3eKBWGogmNnkg==", + "dev": true + }, + "node_modules/highlight.js": { + "version": "11.9.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.9.0.tgz", + "integrity": "sha512-fJ7cW7fQGCYAkgv4CPfwFHrfd/cLS4Hau96JuJ+ZTOWhjnhoeN1ub1tFmALm/+lW5z4WCAuAV9bm05AP0mS6Gw==", + 
"engines": { + "node": ">=12.0.0" + } + }, + "node_modules/html-escaper": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-3.0.3.tgz", + "integrity": "sha512-RuMffC89BOWQoY0WKGpIhn5gX3iI54O6nRA0yC124NYVtzjmFWBIiFd8M0x+ZdX0P9R4lADg1mgP8C7PxGOWuQ==" + }, + "node_modules/htmlparser2": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + }, + "node_modules/http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true, + "engines": { + "node": ">=8.12.0" + } + }, + "node_modules/i18next": { + "version": "23.10.1", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.10.1.tgz", + "integrity": "sha512-NDiIzFbcs3O9PXpfhkjyf7WdqFn5Vq6mhzhtkXzj51aOcNuPNcTwuYNuXCpHsanZGHlHKL35G7huoFeVic1hng==", + "funding": [ + { + "type": "individual", + "url": "https://locize.com" + }, + { + "type": "individual", + "url": "https://locize.com/i18next.html" + }, + { + "type": "individual", + "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" + } + ], + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, + "node_modules/i18next-browser-languagedetector": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/i18next-browser-languagedetector/-/i18next-browser-languagedetector-7.2.0.tgz", + "integrity": "sha512-U00DbDtFIYD3wkWsr2aVGfXGAj2TgnELzOX9qv8bT0aJtvPV9CRO77h+vgmHFBMe7LAxdwvT/7VkCWGya6L3tA==", + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, + "node_modules/i18next-parser": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/i18next-parser/-/i18next-parser-8.13.0.tgz", + "integrity": "sha512-XU7resoeNcpJazh29OncQQUH6HsgCxk06RqBBDAmLHldafxopfCHY1vElyG/o3EY0Sn7XjelAmPTV0SgddJEww==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.23.2", + "broccoli-plugin": "^4.0.7", + "cheerio": "^1.0.0-rc.2", + "colors": "1.4.0", + "commander": "~11.1.0", + "eol": "^0.9.1", + "esbuild": "^0.20.1", + "fs-extra": "^11.1.0", + "gulp-sort": "^2.0.0", + "i18next": "^23.5.1", + "js-yaml": "4.1.0", + "lilconfig": "^3.0.0", + "rsvp": "^4.8.2", + "sort-keys": "^5.0.0", + "typescript": "^5.0.4", + "vinyl": "~3.0.0", + "vinyl-fs": "^4.0.0", + "vue-template-compiler": "^2.6.11" + }, + "bin": { + "i18next": "bin/cli.js" + }, + "engines": { + "node": ">=16.0.0 || >=18.0.0 || >=20.0.0", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/i18next-resources-to-backend": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/i18next-resources-to-backend/-/i18next-resources-to-backend-1.2.0.tgz", + "integrity": "sha512-8f1l03s+QxDmCfpSXCh9V+AFcxAwIp0UaroWuyOx+hmmv8484GcELHs+lnu54FrNij8cDBEXvEwhzZoXsKcVpg==", + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/idb": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/idb/-/idb-7.1.1.tgz", + "integrity": "sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ==" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-meta-resolve": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.0.0.tgz", + "integrity": "sha512-okYUR7ZQPH+efeuMJGlq4f8ubUgO50kByRPyt/Cy1Io4PSRsPjxME+YlVaCOx+NIToW7hCsZNFJyTPFFKepRSA==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "dependencies": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + }, + "engines": { 
+ "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==" + }, + "node_modules/is-negated-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz", + "integrity": "sha512-czXVVn/QEmgvej1f50BZ648vUI+em0xqMq2Sn+QncCLN4zj1UAxlT+kw/6ggQTOaZPd1HqKQGEqbpQVtJucWug==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-reference": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-valid-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz", + "integrity": "sha512-AhiROmoEFDSsjx8hW+5sGwgKVIORcXnrlAx/R0ZSeaPw70Vw0CqkGBBhHGL58Uox2eXnU1AnvXJl1XlyedO5bA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", 
+ "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true + }, + "node_modules/jackspeak": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jiti": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.0.tgz", + "integrity": "sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-sha256": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/js-sha256/-/js-sha256-0.10.1.tgz", + "integrity": "sha512-5obBtsz9301ULlsgggLg542s/jqtddfOpV5KJc4hajc9JV8GeY2gZHSVpYBn4nWqAUTJ9v+xwtbJ1mIBgIH5Vw==" + }, + "node_modules/js-tokens": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", + "integrity": "sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", 
+ "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "node_modules/katex": { + "version": "0.16.10", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz", + "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "engines": { + "node": ">= 12" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/known-css-properties": { + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.29.0.tgz", + "integrity": "sha512-Ne7wqW7/9Cz54PDt4I3tcV+hAyat8ypyOGzYRJQfdxnnjeWsTxt1cy8pjvvKeI5kfXuyvULyeeAvwvvtAX3ayQ==", + "dev": true + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==" + }, + "node_modules/lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", + "dev": true, + "engines": { + "node": "> 0.8" + } + }, + "node_modules/lead": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/lead/-/lead-4.0.0.tgz", + "integrity": "sha512-DpMa59o5uGUWWjruMp71e6knmwKU3jRBBn1kjuLWN9EeIOxNeSAwvHf03WIl8g/ZMR2oSQC9ej3yeLBwdDc/pg==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dev": true, + "dependencies": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "enquirer": ">= 2.3.0 < 3" + }, + "peerDependenciesMeta": { + "enquirer": { + "optional": true + } + } + }, + "node_modules/listr2/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/listr2/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/listr2/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/local-pkg": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz", + "integrity": "sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==", + "dev": true, + "dependencies": { + "mlly": "^1.4.2", + "pkg-types": "^1.0.3" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + 
"node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + }, + "node_modules/lodash.castarray": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz", + "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==", + "dev": true + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + 
}, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/log-update/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/magic-string": { + "version": "0.30.8", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.8.tgz", + "integrity": "sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/marked": { + "version": "9.1.6", + "resolved": "https://registry.npmjs.org/marked/-/marked-9.1.6.tgz", + "integrity": "sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 16" + } + }, + "node_modules/matcher-collection": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/matcher-collection/-/matcher-collection-2.0.1.tgz", + "integrity": "sha512-daE62nS2ZQsDg9raM0IlZzLmI2u+7ZapXBwdoeBUKAYERPDDIc0qNqA8E0Rp2D+gspKR7BgIFP52GeujaGXWeQ==", + "dev": true, + "dependencies": { + "@types/minimatch": "^3.0.3", + "minimatch": "^3.0.2" + }, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/matcher-collection/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + 
"node_modules/matcher-collection/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mermaid": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.9.1.tgz", + "integrity": "sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==", + "dependencies": { + "@braintree/sanitize-url": "^6.0.1", + "@types/d3-scale": "^4.0.3", + "@types/d3-scale-chromatic": "^3.0.0", + "cytoscape": "^3.28.1", + "cytoscape-cose-bilkent": "^4.1.0", + "d3": "^7.4.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.10", + "dayjs": "^1.11.7", + "dompurify": "^3.0.5", + "elkjs": "^0.9.0", + "katex": "^0.16.9", + "khroma": "^2.0.0", + "lodash-es": "^4.17.21", + "mdast-util-from-markdown": "^1.3.0", + "non-layered-tidy-tree-layout": "^2.0.2", + "stylis": "^4.1.3", + "ts-dedent": "^2.2.0", + "uuid": "^9.0.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + 
"funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", + "integrity": 
"sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz", + "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", + "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz", + "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", + "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mktemp": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/mktemp/-/mktemp-0.4.0.tgz", + "integrity": "sha512-IXnMcJ6ZyTuhRmJSjzvHSRhlVPiN9Jwc6e59V0bEJ0ba6OBeX2L0E+mRN1QseeOF4mM+F1Rit6Nh7o+rl2Yn/A==", + "dev": true, + "engines": { + "node": ">0.9" + } + }, + "node_modules/mlly": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.0.tgz", + "integrity": "sha512-U9SDaXGEREBYQgfejV97coK0UL1r+qnF2SyO9A3qcI8MzKnsIFKHNVEkrDyNncQTKQQumsasmeq84eNMdBfsNQ==", + "dev": true, + "dependencies": { + "acorn": "^8.11.3", + "pathe": "^1.1.2", + "pkg-types": "^1.1.0", + "ufo": "^1.5.3" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", + "integrity": "sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==", + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.6.tgz", + "integrity": "sha512-rRq0eMHoGZxlvaFOUdK1Ev83Bd1IgzzR+WJ3IbDJ7QOSdAxYjlurSPqFs9s4lJg29RT6nPwizFtJhQS6V5xgiA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.js" + }, + "engines": { + "node": "^18 || >=20" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "dev": true + }, + "node_modules/non-layered-tidy-tree-layout": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/non-layered-tidy-tree-layout/-/non-layered-tidy-tree-layout-2.0.2.tgz", + "integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/now-and-later": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/now-and-later/-/now-and-later-3.0.0.tgz", + "integrity": "sha512-pGO4pzSdaxhWTGkfSfHx3hVzJVslFPwBp2Myq9MYN/ChfJZF87ochMAXnvz6/58RJSf5ik2q9tXprBBrk2cpcg==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dev": true, + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==", + "dev": true + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", + "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", + "dev": true, + "dependencies": { + "domhandler": "^5.0.2", + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/path-posix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/path-posix/-/path-posix-1.0.0.tgz", + "integrity": "sha512-1gJ0WpNIiYcQydgg3Ed8KzvIqTsDpNwq+cjBCssvBtuTWjEqY1AW+i+OepiEMqDCzyro9B2sLAe4RBPajMYFiA==", + "dev": true + }, + "node_modules/path-scurry": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", + "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", + "dev": true, + "dependencies": { + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz", + "integrity": "sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/path-type": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/pathe": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", + "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true + }, + "node_modules/periscopic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": "^3.0.0" + } + }, + "node_modules/periscopic/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/periscopic/node_modules/is-reference": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz", + "integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/pkg-types": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.1.1.tgz", + "integrity": "sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==", + "dev": true, + "dependencies": { + "confbox": "^0.1.7", + "mlly": "^1.7.0", + "pathe": "^1.1.2" + } + }, + "node_modules/plain-tag": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/plain-tag/-/plain-tag-0.1.3.tgz", + "integrity": "sha512-yyVAOFKTAElc7KdLt2+UKGExNYwYb/Y/WE9i+1ezCQsJE8gbKSjewfpRqK2nQgZ4d4hhAAGgDCOcIZVilqE5UA==" + }, + "node_modules/polyscript": { + "version": "0.12.8", + "resolved": "https://registry.npmjs.org/polyscript/-/polyscript-0.12.8.tgz", + "integrity": "sha512-kcG3W9jU/s1sYjWOTAa2jAh5D2jm3zJRi+glSTsC+lA3D1b/Sd67pEIGpyL9bWNKYSimqAx4se6jAhQjJZ7+jQ==", + "dependencies": { + "@ungap/structured-clone": "^1.2.0", + "@ungap/with-resolvers": "^0.1.0", + "@webreflection/fetch": "^0.1.5", + "basic-devtools": "^0.1.6", + "codedent": "^0.1.2", + "coincident": "^1.2.3", + "gc-hook": "^0.3.1", + "html-escaper": "^3.0.3", + "proxy-target": "^3.0.2", + "sticky-module": "^0.1.1", + "to-json-callback": "^0.1.1" + } + }, + "node_modules/postcss": { + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "dev": true, + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "dev": true, + "dependencies": { + "lilconfig": "^2.0.5", + "yaml": "^1.10.2" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + 
"node_modules/postcss-load-config/node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/postcss-nested": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.1.tgz", + "integrity": "sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.11" + }, + "engines": { + "node": ">=12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-nested/node_modules/postcss-selector-parser": { + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-safe-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-6.0.0.tgz", + "integrity": "sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==", + "dev": true, + "engines": { + "node": ">=12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.3.3" + } + }, + "node_modules/postcss-scss": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/postcss-scss/-/postcss-scss-4.0.9.tgz", + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-scss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.4.29" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true + }, + "node_modules/postcss/node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || 
^14 || >=15.0.1" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-svelte": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/prettier-plugin-svelte/-/prettier-plugin-svelte-2.10.1.tgz", + "integrity": "sha512-Wlq7Z5v2ueCubWo0TZzKc9XHcm7TDxqcuzRuGd0gcENfzfT4JZ9yDlCbEgxWgiPmLHkBjfOtpAWkcT28MCDpUQ==", + "dev": true, + "peerDependencies": { + "prettier": "^1.16.4 || ^2.0.0", + "svelte": "^3.2.0 || ^4.0.0-next.0" + } + }, + "node_modules/pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/promise-map-series": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/promise-map-series/-/promise-map-series-0.3.0.tgz", + "integrity": "sha512-3npG2NGhTc8BWBolLLf8l/92OxMGaRLbqvIh9wjCHhDXNvk4zsxaTaCpiCunW09qWPrN2zeNSNwRLVBrQQtutA==", + "dev": true, + "engines": { + "node": "10.* || >= 12.*" + } + }, + "node_modules/proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": 
"sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", + "dev": true + }, + "node_modules/proxy-target": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/proxy-target/-/proxy-target-3.0.2.tgz", + "integrity": "sha512-FFE1XNwXX/FNC3/P8HiKaJSy/Qk68RitG/QEcLy/bVnTAPlgTAWPZKh0pARLAnpfXQPKyalBhk009NRTgsk8vQ==" + }, + "node_modules/psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", + "dev": true + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/pyodide": { + "version": "0.26.1", + "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.26.1.tgz", + "integrity": "sha512-P+Gm88nwZqY7uBgjbQH8CqqU6Ei/rDn7pS1t02sNZsbyLJMyE2OVXjgNuqVT3KqYWnyGREUN0DbBUCJqk8R0ew==", + "dependencies": { + "ws": "^8.5.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/qs": { + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", + "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", + "dev": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/queue-tick": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", + "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==", + "dev": true + }, + "node_modules/quick-temp": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/quick-temp/-/quick-temp-0.1.8.tgz", + "integrity": "sha512-YsmIFfD9j2zaFwJkzI6eMG7y0lQP7YeWzgtFgNl38pGWZBSXJooZbOWwkcRot7Vt0Fg9L23pX0tqWU3VvLDsiA==", + "dev": true, + "dependencies": { + "mktemp": "~0.4.0", + "rimraf": "^2.5.4", + "underscore.string": "~3.3.4" + } + }, + "node_modules/quick-temp/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + 
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/quick-temp/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/quick-temp/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/quick-temp/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", + "dev": true + }, + "node_modules/replace-ext": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz", + "integrity": "sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==", + "dev": true, + "dependencies": { + "throttleit": "^1.0.0" + } + }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-options": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/resolve-options/-/resolve-options-2.0.0.tgz", + "integrity": "sha512-/FopbmmFOQCfsCx77BRFdKOniglTiHumLgwvd6IDPihy1GKkadZbgQJBcTb2lMzSR1pndzd96b1nZrreZ7+9/A==", + "dev": true, + "dependencies": { + "value-or-function": "^4.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", + "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "dev": true + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + 
"rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==" + }, + "node_modules/rollup": { + "version": "3.29.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", + "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=14.18.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/rsvp": { + "version": "4.8.5", + "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", + "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==", + "dev": true, + "engines": { + "node": "6.* || >= 7.*" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==" + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": 
"sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/sander": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/sander/-/sander-0.5.1.tgz", + "integrity": "sha512-3lVqBir7WuKDHGrKRDn/1Ye3kwpXaDOMsiRP1wd6wpZW56gJhsbp5RqQpA6JG/P+pkXizygnr1dKR8vzWaVsfA==", + "dev": true, + "dependencies": { + "es6-promise": "^3.1.2", + "graceful-fs": "^4.1.3", + "mkdirp": "^0.5.1", + "rimraf": "^2.5.2" + } + }, + "node_modules/sander/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/sander/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sander/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/sander/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz", + "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==" + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": 
"sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sirv": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", + "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/sirv/node_modules/mrmime": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", + "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", + "engines": { + "node": ">=10" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/socket.io-client": { + "version": "4.7.5", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.7.5.tgz", + "integrity": "sha512-sJ/tqHOCe7Z50JCBCXrsY3I2k03iOiUe+tj1OmKeD2lXPiGH/RUCdTZFoqVyN7l1MnpIzPrGtLcijffmeouNlQ==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.2", + "engine.io-client": "~6.5.2", + "socket.io-parser": "~4.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/socket.io-parser": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz", + "integrity": "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/sorcery": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/sorcery/-/sorcery-0.11.0.tgz", + "integrity": "sha512-J69LQ22xrQB1cIFJhPfgtLuI6BpWRiWu1Y3vSsIwK/eAScqJxd/+CJlUuHQRdX2C9NGFamq+KqNywGgaThwfHw==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.14", + "buffer-crc32": "^0.2.5", + "minimist": "^1.2.0", + "sander": "^0.5.0" + }, + "bin": { + "sorcery": "bin/sorcery" + } + }, + "node_modules/sort-keys": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-5.0.0.tgz", + "integrity": "sha512-Pdz01AvCAottHTPQGzndktFNdbRA75BgOfeT1hH+AMnJFv8lynkPi42rfeEhpx1saTEI3YNMWxfqu0sFD1G8pw==", + "dev": true, + "dependencies": { + "is-plain-obj": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sortablejs": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.15.2.tgz", + "integrity": "sha512-FJF5jgdfvoKn1MAKSdGs33bIqLi3LmsgVTliuX6iITj834F+JRQZN90Z93yql8h0K2t0RwDPBmxwlbZfDcxNZA==" + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true + }, + "node_modules/sshpk": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz", + "integrity": "sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==", + "dev": true, + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true + }, + "node_modules/std-env": { + "version": "3.7.0", + "resolved": 
"https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz", + "integrity": "sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==", + "dev": true + }, + "node_modules/sticky-module": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/sticky-module/-/sticky-module-0.1.1.tgz", + "integrity": "sha512-IuYgnyIMUx/m6rtu14l/LR2MaqOLtpXcWkxPmtPsiScRHEo+S4Tojk+DWFHOncSdFX/OsoLOM4+T92yOmI1AMw==" + }, + "node_modules/stream-composer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stream-composer/-/stream-composer-1.0.2.tgz", + "integrity": "sha512-bnBselmwfX5K10AH6L4c8+S5lgZMWI7ZYrz2rvYjCPB2DIMC4Ig8OpxGpNJSxRZ58oti7y1IcNvjBAz9vW5m4w==", + "dev": true, + "dependencies": { + "streamx": "^2.13.2" + } + }, + "node_modules/streamx": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz", + "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==", + "dev": true, + "dependencies": { + "fast-fifo": "^1.1.0", + "queue-tick": "^1.0.1" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", + "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", + "dev": true, + "dependencies": { + "js-tokens": "^9.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==" + }, + "node_modules/stylis": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.2.tgz", + "integrity": "sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==" + }, + "node_modules/sucrase": { + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", + "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "^10.3.10", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": 
"sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/sucrase/node_modules/glob": { + "version": "10.3.10", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", + "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.5", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svelte": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.12.tgz", + "integrity": "sha512-d8+wsh5TfPwqVzbm4/HCXC783/KPHV60NvwitJnyTA5lWn1elhXMNWhXGCJ7PwPa8qFUnyJNIyuIRt2mT0WMug==", + "dependencies": { + "@ampproject/remapping": "^2.2.1", + "@jridgewell/sourcemap-codec": "^1.4.15", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/estree": "^1.0.1", + "acorn": "^8.9.0", + "aria-query": "^5.3.0", + "axobject-query": "^4.0.0", + "code-red": "^1.0.3", + "css-tree": "^2.3.1", + "estree-walker": "^3.0.3", + "is-reference": "^3.0.1", + "locate-character": "^3.0.0", + "magic-string": "^0.30.4", + "periscopic": "^3.1.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/svelte-check": { + "version": "3.6.8", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-3.6.8.tgz", + "integrity": "sha512-rhXU7YCDtL+lq2gCqfJDXKTxJfSsCgcd08d7VWBFxTw6IWIbMWSaASbAOD3N0VV9TYSSLUqEBiratLd8WxAJJA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.17", + "chokidar": "^3.4.1", + "fast-glob": "^3.2.7", + "import-fresh": "^3.2.1", + "picocolors": "^1.0.0", + "sade": "^1.7.4", + "svelte-preprocess": "^5.1.3", + "typescript": "^5.0.3" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "peerDependencies": { + "svelte": "^3.55.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0" + } + }, + "node_modules/svelte-confetti": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/svelte-confetti/-/svelte-confetti-1.3.2.tgz", + "integrity": "sha512-R+JwFTC7hIgWVA/OuXrkj384B7CMoceb0t9VacyW6dORTQg0pWojVBB8Bo3tM30cLEQE48Fekzqgx+XSzHESMA==", + "dev": true, + "peerDependencies": { + "svelte": "^4.0.0" + } + }, + "node_modules/svelte-eslint-parser": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-0.33.1.tgz", + "integrity": "sha512-vo7xPGTlKBGdLH8T5L64FipvTrqv3OQRx9d2z5X05KKZDlF4rQk8KViZO4flKERY+5BiVdOh7zZ7JGJWo5P0uA==", + "dev": true, + 
"dependencies": { + "eslint-scope": "^7.0.0", + "eslint-visitor-keys": "^3.0.0", + "espree": "^9.0.0", + "postcss": "^8.4.29", + "postcss-scss": "^4.0.8" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "svelte": "^3.37.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/svelte-hmr": { + "version": "0.15.3", + "resolved": "https://registry.npmjs.org/svelte-hmr/-/svelte-hmr-0.15.3.tgz", + "integrity": "sha512-41snaPswvSf8TJUhlkoJBekRrABDXDMdpNpT2tfHIv4JuhgvHqLMhEPGtaQn0BmbNSTkuz2Ed20DF2eHw0SmBQ==", + "engines": { + "node": "^12.20 || ^14.13.1 || >= 16" + }, + "peerDependencies": { + "svelte": "^3.19.0 || ^4.0.0" + } + }, + "node_modules/svelte-preprocess": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/svelte-preprocess/-/svelte-preprocess-5.1.3.tgz", + "integrity": "sha512-xxAkmxGHT+J/GourS5mVJeOXZzne1FR5ljeOUAMXUkfEhkLEllRreXpbl3dIYJlcJRfL1LO1uIAPpBpBfiqGPw==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@types/pug": "^2.0.6", + "detect-indent": "^6.1.0", + "magic-string": "^0.30.5", + "sorcery": "^0.11.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">= 16.0.0", + "pnpm": "^8.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.10.2", + "coffeescript": "^2.5.1", + "less": "^3.11.3 || ^4.0.0", + "postcss": "^7 || ^8", + "postcss-load-config": "^2.1.0 || ^3.0.0 || ^4.0.0 || ^5.0.0", + "pug": "^3.0.0", + "sass": "^1.26.8", + "stylus": "^0.55.0", + "sugarss": "^2.0.0 || ^3.0.0 || ^4.0.0", + "svelte": "^3.23.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0", + "typescript": ">=3.9.5 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "coffeescript": { + "optional": true + }, + "less": { + "optional": true + }, + "postcss": { + "optional": true + }, + "postcss-load-config": { + "optional": true + }, + "pug": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/svelte-sonner": { + "version": "0.3.19", + "resolved": "https://registry.npmjs.org/svelte-sonner/-/svelte-sonner-0.3.19.tgz", + "integrity": "sha512-jpPOgLtHwRaB6Vqo2dUQMv15/yUV/BQWTjKpEqQ11uqRSHKjAYUKZyGrHB2cQsGmyjR0JUzBD58btpgNqINQ/Q==", + "peerDependencies": { + "svelte": ">=3 <5" + } + }, + "node_modules/svelte/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/svelte/node_modules/is-reference": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz", + "integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/symlink-or-copy": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/symlink-or-copy/-/symlink-or-copy-1.3.1.tgz", + "integrity": "sha512-0K91MEXFpBUaywiwSSkmKjnGcasG/rVBXFLJz5DrgGabpYD6N+3yZrfD6uUIfpuTu65DZLHi7N8CizHc07BPZA==", + "dev": true + }, + "node_modules/tabbable": { + "version": "6.2.0", + "resolved": 
"https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", + "integrity": "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==" + }, + "node_modules/tailwindcss": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.1.tgz", + "integrity": "sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==", + "dev": true, + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.5.3", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.0", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.19.1", + "lilconfig": "^2.1.0", + "micromatch": "^4.0.5", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.0.0", + "postcss": "^8.4.23", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.1", + "postcss-nested": "^6.0.1", + "postcss-selector-parser": "^6.0.11", + "resolve": "^1.22.2", + "sucrase": "^3.32.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tailwindcss/node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/tailwindcss/node_modules/postcss-load-config": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/tailwindcss/node_modules/postcss-load-config/node_modules/lilconfig": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/tailwindcss/node_modules/postcss-selector-parser": { + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/tailwindcss/node_modules/yaml": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", + "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/teex": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/teex/-/teex-1.0.1.tgz", + "integrity": "sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==", + "dev": true, + "dependencies": { + "streamx": "^2.12.5" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/throttleit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.1.tgz", + "integrity": "sha512-vDZpf9Chs9mAdfY046mcPt8fg5QSZr37hEH4TXYBnDF+izxgrbRGUAAaBvIk/fJm9aOFCGFd1EsNg5AZCbnQCQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", + "dependencies": { + "globalyzer": "0.1.0", + "globrex": "^0.1.2" + } + }, + "node_modules/tinybench": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.8.0.tgz", + "integrity": "sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==", + "dev": true + }, + "node_modules/tinypool": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.8.4.tgz", + "integrity": "sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.1.tgz", + "integrity": "sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tippy.js": { + "version": "6.3.7", + "resolved": "https://registry.npmjs.org/tippy.js/-/tippy.js-6.3.7.tgz", + "integrity": "sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==", + "dependencies": { + "@popperjs/core": "^2.9.0" + } + 
}, + "node_modules/tmp": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "dev": true, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/to-json-callback": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/to-json-callback/-/to-json-callback-0.1.1.tgz", + "integrity": "sha512-BzOeinTT3NjE+FJ2iCvWB8HvyuyBzoH3WlSnJ+AYVC4tlePyZWSYdkQIFOARWiq0t35/XhmI0uQsFiUsRksRqg==" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/to-through": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/to-through/-/to-through-3.0.0.tgz", + "integrity": "sha512-y8MN937s/HVhEoBU1SxfHC+wxCHkV1a9gW8eAdTadYh/bGyesZIVcbjI+mSpFbSVwQici/XjBjuUyri1dnXwBw==", + "dev": true, + "dependencies": { + "streamx": "^2.12.5" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", + "dev": true, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "engines": { + "node": ">=6.10" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + 
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/turndown": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/turndown/-/turndown-7.2.0.tgz", + "integrity": "sha512-eCZGBN4nNNqM9Owkv9HAtWRYfLA4h909E/WGAWWBpmB275ehNhZyk87/Tpvjbp0jjNl9XwCsbe6bm6CqFsgD+A==", + "dependencies": { + "@mixmark-io/domino": "^2.2.0" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-checked-collections": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/type-checked-collections/-/type-checked-collections-0.1.7.tgz", + "integrity": "sha512-fLIydlJy7IG9XL4wjRwEcKhxx/ekLXiWiMvcGo01cOMF+TN+5ZqajM1mRNRz2bNNi1bzou2yofhjZEQi7kgl9A==" + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", + "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ufo": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz", + "integrity": "sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==", + "dev": true + }, + "node_modules/underscore.string": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-3.3.6.tgz", + "integrity": "sha512-VoC83HWXmCrF6rgkyxS9GHv8W9Q5nhMKho+OadDJGzL2oDYbYEppBaCMH6pFlwLeqj2QS+hhkw2kpXkSdD1JxQ==", + "dev": true, + "dependencies": { + "sprintf-js": "^1.1.1", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/undici": { + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + 
"node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "devOptional": true + }, + "node_modules/unist-util-stringify-position": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", + "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": 
"sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/value-or-function": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/value-or-function/-/value-or-function-4.0.0.tgz", + "integrity": "sha512-aeVK81SIuT6aMJfNo9Vte8Dw0/FZINGBV8BfCraGtqVxIeLAEhJyoWs8SmvRVmXfGss2PmmOwZCuBPbZR+IYWg==", + "dev": true, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/verror/node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", + "dev": true + }, + "node_modules/vinyl": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-3.0.0.tgz", + "integrity": "sha512-rC2VRfAVVCGEgjnxHUnpIVh3AGuk62rP3tqVrn+yab0YH7UULisC085+NYH+mnqf3Wx4SpSi1RQMwudL89N03g==", + "dev": true, + "dependencies": { + "clone": "^2.1.2", + "clone-stats": "^1.0.0", + "remove-trailing-separator": "^1.1.0", + "replace-ext": "^2.0.0", + "teex": "^1.0.1" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vinyl-contents": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/vinyl-contents/-/vinyl-contents-2.0.0.tgz", + "integrity": "sha512-cHq6NnGyi2pZ7xwdHSW1v4Jfnho4TEGtxZHw01cmnc8+i7jgR6bRnED/LbrKan/Q7CvVLbnvA5OepnhbpjBZ5Q==", + "dev": true, + "dependencies": { + "bl": "^5.0.0", + "vinyl": "^3.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vinyl-fs": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-4.0.0.tgz", + "integrity": "sha512-7GbgBnYfaquMk3Qu9g22x000vbYkOex32930rBnc3qByw6HfMEAoELjCjoJv4HuEQxHAurT+nvMHm6MnJllFLw==", + "dev": true, + "dependencies": { + "fs-mkdirp-stream": "^2.0.1", + "glob-stream": "^8.0.0", + "graceful-fs": "^4.2.11", + "iconv-lite": "^0.6.3", + "is-valid-glob": "^1.0.0", + "lead": "^4.0.0", + "normalize-path": "3.0.0", + "resolve-options": "^2.0.0", + "stream-composer": "^1.0.2", + "streamx": "^2.14.0", + "to-through": "^3.0.0", + "value-or-function": "^4.0.0", + "vinyl": "^3.0.0", + "vinyl-sourcemap": "^2.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vinyl-sourcemap": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-2.0.0.tgz", + "integrity": "sha512-BAEvWxbBUXvlNoFQVFVHpybBbjW1r03WhohJzJDSfgrrK5xVYIDTan6xN14DlyImShgDRv2gl9qhM6irVMsV0Q==", + "dev": true, + "dependencies": { + "convert-source-map": "^2.0.0", + "graceful-fs": "^4.2.10", + "now-and-later": "^3.0.0", + "streamx": "^2.12.5", + "vinyl": "^3.0.0", + "vinyl-contents": "^2.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vite": { + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.3.tgz", + "integrity": 
"sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==", + "dependencies": { + "esbuild": "^0.18.10", + "postcss": "^8.4.27", + "rollup": "^3.27.1" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + "@types/node": ">= 14", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.6.0.tgz", + "integrity": "sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==", + "dev": true, + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.3.4", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "vite": "^5.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite-node/node_modules/rollup": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.17.2.tgz", + "integrity": "sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.17.2", + "@rollup/rollup-android-arm64": "4.17.2", + "@rollup/rollup-darwin-arm64": "4.17.2", + "@rollup/rollup-darwin-x64": "4.17.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.17.2", + "@rollup/rollup-linux-arm-musleabihf": "4.17.2", + "@rollup/rollup-linux-arm64-gnu": "4.17.2", + "@rollup/rollup-linux-arm64-musl": "4.17.2", + "@rollup/rollup-linux-powerpc64le-gnu": "4.17.2", + "@rollup/rollup-linux-riscv64-gnu": "4.17.2", + "@rollup/rollup-linux-s390x-gnu": "4.17.2", + "@rollup/rollup-linux-x64-gnu": "4.17.2", + "@rollup/rollup-linux-x64-musl": "4.17.2", + "@rollup/rollup-win32-arm64-msvc": "4.17.2", + "@rollup/rollup-win32-ia32-msvc": "4.17.2", + "@rollup/rollup-win32-x64-msvc": "4.17.2", + "fsevents": "~2.3.2" + } + }, + "node_modules/vite-node/node_modules/vite": { + "version": "5.2.11", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.11.tgz", + "integrity": "sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==", + "dev": true, + "dependencies": { + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true 
+ }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", + "integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz", + "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz", + "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", + "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz", + "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz", + "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz", + "integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz", + "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.18.20", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz", + "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz", + "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz", + "integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz", + "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz", + "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz", + "integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz", + "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz", + "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz", + "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz", + "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz", + "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz", + "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz", + "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz", + "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", + "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/android-arm": "0.18.20", + "@esbuild/android-arm64": "0.18.20", + "@esbuild/android-x64": "0.18.20", + "@esbuild/darwin-arm64": "0.18.20", + "@esbuild/darwin-x64": "0.18.20", + "@esbuild/freebsd-arm64": "0.18.20", + "@esbuild/freebsd-x64": "0.18.20", + "@esbuild/linux-arm": "0.18.20", + "@esbuild/linux-arm64": "0.18.20", + "@esbuild/linux-ia32": "0.18.20", + "@esbuild/linux-loong64": "0.18.20", + "@esbuild/linux-mips64el": "0.18.20", + "@esbuild/linux-ppc64": "0.18.20", + "@esbuild/linux-riscv64": "0.18.20", + "@esbuild/linux-s390x": "0.18.20", + "@esbuild/linux-x64": "0.18.20", + "@esbuild/netbsd-x64": "0.18.20", + "@esbuild/openbsd-x64": "0.18.20", + "@esbuild/sunos-x64": "0.18.20", + "@esbuild/win32-arm64": "0.18.20", + "@esbuild/win32-ia32": "0.18.20", + "@esbuild/win32-x64": "0.18.20" + } + }, + "node_modules/vitefu": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz", + "integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==", + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true 
+ } + } + }, + "node_modules/vitest": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-1.6.0.tgz", + "integrity": "sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==", + "dev": true, + "dependencies": { + "@vitest/expect": "1.6.0", + "@vitest/runner": "1.6.0", + "@vitest/snapshot": "1.6.0", + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "acorn-walk": "^8.3.2", + "chai": "^4.3.10", + "debug": "^4.3.4", + "execa": "^8.0.1", + "local-pkg": "^0.5.0", + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "tinybench": "^2.5.1", + "tinypool": "^0.8.3", + "vite": "^5.0.0", + "vite-node": "1.6.0", + "why-is-node-running": "^2.2.2" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "1.6.0", + "@vitest/ui": "1.6.0", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/vitest/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/vitest/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/rollup": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.17.2.tgz", + "integrity": "sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.17.2", + "@rollup/rollup-android-arm64": "4.17.2", + "@rollup/rollup-darwin-arm64": "4.17.2", + "@rollup/rollup-darwin-x64": "4.17.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.17.2", + "@rollup/rollup-linux-arm-musleabihf": "4.17.2", + "@rollup/rollup-linux-arm64-gnu": "4.17.2", + "@rollup/rollup-linux-arm64-musl": "4.17.2", + "@rollup/rollup-linux-powerpc64le-gnu": "4.17.2", + "@rollup/rollup-linux-riscv64-gnu": "4.17.2", + "@rollup/rollup-linux-s390x-gnu": "4.17.2", + "@rollup/rollup-linux-x64-gnu": "4.17.2", + "@rollup/rollup-linux-x64-musl": "4.17.2", + "@rollup/rollup-win32-arm64-msvc": "4.17.2", + "@rollup/rollup-win32-ia32-msvc": "4.17.2", + "@rollup/rollup-win32-x64-msvc": "4.17.2", + "fsevents": "~2.3.2" + } + }, + "node_modules/vitest/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/vite": { + "version": "5.2.11", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.11.tgz", + "integrity": "sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==", + "dev": true, + "dependencies": { + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + 
"optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vue-template-compiler": { + "version": "2.7.16", + "resolved": "https://registry.npmjs.org/vue-template-compiler/-/vue-template-compiler-2.7.16.tgz", + "integrity": "sha512-AYbUWAJHLGGQM7+cNTELw+KsOG9nl2CnSv467WobS5Cv9uk3wFcnr1Etsz2sEIHEZvw1U+o9mRlEO6QbZvUPGQ==", + "dev": true, + "dependencies": { + "de-indent": "^1.0.2", + "he": "^1.2.0" + } + }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==" + }, + "node_modules/walk-sync": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/walk-sync/-/walk-sync-2.2.0.tgz", + "integrity": "sha512-IC8sL7aB4/ZgFcGI2T1LczZeFWZ06b3zoHH7jBPyHxOtIIz1jppWHjjEXkOFvFojBVAK9pV7g47xOZ4LW3QLfg==", + "dev": true, + "dependencies": { + "@types/minimatch": "^3.0.3", + "ensure-posix-path": "^1.1.0", + "matcher-collection": "^2.0.0", + "minimatch": "^3.0.4" + }, + "engines": { + "node": "8.* || >= 10.*" + } + }, + "node_modules/walk-sync/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/walk-sync/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/web-worker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", + "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==" + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/why-is-node-running": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.2.2.tgz", + "integrity": "sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==", + "dev": true, + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + 
"optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xmlhttprequest-ssl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz", + "integrity": "sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000000000000000000000000000000000000..f7cc1259827490ef41a4cabcf8a8520948f041d7 --- /dev/null +++ b/package.json @@ -0,0 +1,80 @@ +{ + "name": "open-webui", + "version": "0.3.10", + "private": true, + "scripts": { + "dev": "npm run pyodide:fetch && vite dev --host", + "build": "npm run pyodide:fetch && vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "lint": "npm run lint:frontend ; npm run lint:types ; npm run lint:backend", + "lint:frontend": "eslint . --fix", + "lint:types": "npm run check", + "lint:backend": "pylint backend/", + "format": "prettier --plugin-search-dir --write \"**/*.{js,ts,svelte,css,md,html,json}\"", + "format:backend": "black . 
--exclude \".venv/|/venv/\"", + "i18n:parse": "i18next --config i18next-parser.config.ts && prettier --write \"src/lib/i18n/**/*.{js,json}\"", + "cy:open": "cypress open", + "test:frontend": "vitest --passWithNoTests", + "pyodide:fetch": "node scripts/prepare-pyodide.js" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^2.0.0", + "@sveltejs/adapter-static": "^2.0.3", + "@sveltejs/kit": "^1.30.0", + "@tailwindcss/typography": "^0.5.10", + "@types/bun": "latest", + "@typescript-eslint/eslint-plugin": "^6.17.0", + "@typescript-eslint/parser": "^6.17.0", + "autoprefixer": "^10.4.16", + "cypress": "^13.8.1", + "eslint": "^8.56.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-cypress": "^3.0.2", + "eslint-plugin-svelte": "^2.30.0", + "i18next-parser": "^8.13.0", + "postcss": "^8.4.31", + "prettier": "^2.8.0", + "prettier-plugin-svelte": "^2.10.1", + "svelte": "^4.0.5", + "svelte-check": "^3.4.3", + "svelte-confetti": "^1.3.2", + "tailwindcss": "^3.3.3", + "tslib": "^2.4.1", + "typescript": "^5.0.0", + "vite": "^4.4.2", + "vitest": "^1.6.0" + }, + "type": "module", + "dependencies": { + "@codemirror/lang-javascript": "^6.2.2", + "@codemirror/lang-python": "^6.1.6", + "@codemirror/theme-one-dark": "^6.1.2", + "@pyscript/core": "^0.4.32", + "@sveltejs/adapter-node": "^1.3.1", + "async": "^3.2.5", + "bits-ui": "^0.19.7", + "codemirror": "^6.0.1", + "crc-32": "^1.2.2", + "dayjs": "^1.11.10", + "eventsource-parser": "^1.1.2", + "file-saver": "^2.0.5", + "highlight.js": "^11.9.0", + "i18next": "^23.10.0", + "i18next-browser-languagedetector": "^7.2.0", + "i18next-resources-to-backend": "^1.2.0", + "idb": "^7.1.1", + "js-sha256": "^0.10.1", + "katex": "^0.16.9", + "marked": "^9.1.0", + "mermaid": "^10.9.1", + "pyodide": "^0.26.1", + "socket.io-client": "^4.2.0", + "sortablejs": "^1.15.2", + "svelte-sonner": "^0.3.19", + "tippy.js": "^6.3.7", + "turndown": "^7.2.0", + "uuid": "^9.0.1" + } +} diff --git a/postcss.config.js b/postcss.config.js new file mode 100644 index 0000000000000000000000000000000000000000..0f7721681d725ddea512a5ed734891cf6545ca3c --- /dev/null +++ b/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {} + } +}; diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..efce1158fbb90868b0d7dfe589d2d3f7360ff70f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,118 @@ +[project] +name = "open-webui" +description = "Open WebUI (Formerly Ollama WebUI)" +authors = [ + { name = "Timothy Jaeryang Baek", email = "tim@openwebui.com" } +] +license = { file = "LICENSE" } +dependencies = [ + "fastapi==0.111.0", + "uvicorn[standard]==0.22.0", + "pydantic==2.7.1", + "python-multipart==0.0.9", + + "Flask==3.0.3", + "Flask-Cors==4.0.1", + + "python-socketio==5.11.2", + "python-jose==3.3.0", + "passlib[bcrypt]==1.7.4", + + "requests==2.32.2", + "aiohttp==3.9.5", + "peewee==3.17.5", + "peewee-migrate==1.12.2", + "psycopg2-binary==2.9.9", + "PyMySQL==1.1.1", + "bcrypt==4.1.3", + + "boto3==1.34.110", + + "argon2-cffi==23.1.0", + "APScheduler==3.10.4", + "google-generativeai==0.5.4", + + "langchain==0.2.0", + "langchain-community==0.2.9", + "langchain-chroma==0.1.1", + + "fake-useragent==1.5.1", + "chromadb==0.5.0", + "sentence-transformers==2.7.0", + "pypdf==4.2.0", + "docx2txt==0.8", + "unstructured==0.14.0", + "Markdown==3.6", + "pypandoc==1.13", + "pandas==2.2.2", + "openpyxl==3.1.2", + "pyxlsb==1.0.10", + "xlrd==2.0.1", + "validators==0.28.1", + + "opencv-python-headless==4.9.0.80", + 
"rapidocr-onnxruntime==1.3.22", + + "fpdf2==2.7.9", + "rank-bm25==0.2.2", + + "faster-whisper==1.0.2", + + "PyJWT[crypto]==2.8.0", + "authlib==1.3.1", + + "black==24.4.2", + "langfuse==2.33.0", + "youtube-transcript-api==0.6.2", + "pytube==15.0.0", + "extract_msg", + "pydub", + "duckduckgo-search~=6.1.5" + +] +readme = "README.md" +requires-python = ">= 3.11, < 3.12.0a1" +dynamic = ["version"] +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11", + "Topic :: Communications :: Chat", + "Topic :: Multimedia", +] + +[project.scripts] +open-webui = "open_webui:app" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.rye] +managed = true +dev-dependencies = [] + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.version] +path = "package.json" +pattern = '"version":\s*"(?P[^"]+)"' + +[tool.hatch.build.hooks.custom] # keep this for reading hooks from `hatch_build.py` + +[tool.hatch.build.targets.wheel] +sources = ["backend"] +exclude = [ + ".dockerignore", + ".gitignore", + ".webui_secret_key", + "dev.sh", + "requirements.txt", + "start.sh", + "start_windows.bat", + "webui.db", + "chroma.sqlite3", +] +force-include = { "CHANGELOG.md" = "open_webui/CHANGELOG.md", build = "open_webui/frontend" } diff --git a/requirements-dev.lock b/requirements-dev.lock new file mode 100644 index 0000000000000000000000000000000000000000..e56ad08f0decb5f59981be168623b2a64d9e31dc --- /dev/null +++ b/requirements-dev.lock @@ -0,0 +1,684 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: false +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. 
+aiohttp==3.9.5 + # via langchain + # via langchain-community + # via open-webui +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.3.0 + # via httpx + # via starlette + # via watchfiles +apscheduler==3.10.4 + # via open-webui +argon2-cffi==23.1.0 + # via open-webui +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==23.2.0 + # via aiohttp +authlib==1.3.1 + # via open-webui +av==11.0.0 + # via faster-whisper +backoff==2.2.1 + # via langfuse + # via posthog + # via unstructured +bcrypt==4.1.3 + # via chromadb + # via open-webui + # via passlib +beautifulsoup4==4.12.3 + # via extract-msg + # via unstructured +bidict==0.23.1 + # via python-socketio +black==24.4.2 + # via open-webui +blinker==1.8.2 + # via flask +boto3==1.34.110 + # via open-webui +botocore==1.34.110 + # via boto3 + # via s3transfer +build==1.2.1 + # via chromadb +cachetools==5.3.3 + # via google-auth +certifi==2024.2.2 + # via httpcore + # via httpx + # via kubernetes + # via requests + # via unstructured-client +cffi==1.16.0 + # via argon2-cffi-bindings + # via cryptography +chardet==5.2.0 + # via unstructured +charset-normalizer==3.3.2 + # via requests + # via unstructured-client +chroma-hnswlib==0.7.3 + # via chromadb +chromadb==0.5.0 + # via langchain-chroma + # via open-webui +click==8.1.7 + # via black + # via duckduckgo-search + # via flask + # via nltk + # via peewee-migrate + # via typer + # via uvicorn +colorclass==2.2.2 + # via oletools +coloredlogs==15.0.1 + # via onnxruntime +compressed-rtf==1.0.6 + # via extract-msg +cryptography==42.0.7 + # via authlib + # via msoffcrypto-tool + # via pyjwt +ctranslate2==4.2.1 + # via faster-whisper +dataclasses-json==0.6.6 + # via langchain + # via langchain-community + # via unstructured + # via unstructured-client +deepdiff==7.0.1 + # via unstructured-client +defusedxml==0.7.1 + # via fpdf2 +deprecated==1.2.14 + # via opentelemetry-api + # via opentelemetry-exporter-otlp-proto-grpc +dnspython==2.6.1 + # via email-validator +docx2txt==0.8 + # via open-webui +duckduckgo-search==6.1.5 + # via open-webui +easygui==0.98.3 + # via oletools +ebcdic==1.1.1 + # via extract-msg +ecdsa==0.19.0 + # via python-jose +email-validator==2.1.1 + # via fastapi +emoji==2.11.1 + # via unstructured +et-xmlfile==1.1.0 + # via openpyxl +extract-msg==0.48.5 + # via open-webui +fake-useragent==1.5.1 + # via open-webui +fastapi==0.111.0 + # via chromadb + # via langchain-chroma + # via open-webui +fastapi-cli==0.0.4 + # via fastapi +faster-whisper==1.0.2 + # via open-webui +filelock==3.14.0 + # via huggingface-hub + # via torch + # via transformers +filetype==1.2.0 + # via unstructured +flask==3.0.3 + # via flask-cors + # via open-webui +flask-cors==4.0.1 + # via open-webui +flatbuffers==24.3.25 + # via onnxruntime +fonttools==4.51.0 + # via fpdf2 +fpdf2==2.7.9 + # via open-webui +frozenlist==1.4.1 + # via aiohttp + # via aiosignal +fsspec==2024.3.1 + # via huggingface-hub + # via torch +google-ai-generativelanguage==0.6.4 + # via google-generativeai +google-api-core==2.19.0 + # via google-ai-generativelanguage + # via google-api-python-client + # via google-generativeai +google-api-python-client==2.129.0 + # via google-generativeai +google-auth==2.29.0 + # via google-ai-generativelanguage + # via google-api-core + # via google-api-python-client + # via google-auth-httplib2 + # via google-generativeai + # via kubernetes +google-auth-httplib2==0.2.0 + # via google-api-python-client 
+google-generativeai==0.5.4 + # via open-webui +googleapis-common-protos==1.63.0 + # via google-api-core + # via grpcio-status + # via opentelemetry-exporter-otlp-proto-grpc +grpcio==1.63.0 + # via chromadb + # via google-api-core + # via grpcio-status + # via opentelemetry-exporter-otlp-proto-grpc +grpcio-status==1.62.2 + # via google-api-core +h11==0.14.0 + # via httpcore + # via uvicorn + # via wsproto +httpcore==1.0.5 + # via httpx +httplib2==0.22.0 + # via google-api-python-client + # via google-auth-httplib2 +httptools==0.6.1 + # via uvicorn +httpx==0.27.0 + # via fastapi + # via langfuse +huggingface-hub==0.23.0 + # via faster-whisper + # via sentence-transformers + # via tokenizers + # via transformers +humanfriendly==10.0 + # via coloredlogs +idna==3.7 + # via anyio + # via email-validator + # via httpx + # via langfuse + # via requests + # via unstructured-client + # via yarl +importlib-metadata==7.0.0 + # via opentelemetry-api +importlib-resources==6.4.0 + # via chromadb +itsdangerous==2.2.0 + # via flask +jinja2==3.1.4 + # via fastapi + # via flask + # via torch +jmespath==1.0.1 + # via boto3 + # via botocore +joblib==1.4.2 + # via nltk + # via scikit-learn +jsonpatch==1.33 + # via langchain-core +jsonpath-python==1.0.6 + # via unstructured-client +jsonpointer==2.4 + # via jsonpatch +kubernetes==29.0.0 + # via chromadb +langchain==0.2.0 + # via langchain-community + # via open-webui +langchain-chroma==0.1.1 + # via open-webui +langchain-community==0.2.0 + # via open-webui +langchain-core==0.2.1 + # via langchain + # via langchain-chroma + # via langchain-community + # via langchain-text-splitters +langchain-text-splitters==0.2.0 + # via langchain +langdetect==1.0.9 + # via unstructured +langfuse==2.33.0 + # via open-webui +langsmith==0.1.57 + # via langchain + # via langchain-community + # via langchain-core +lark==1.1.8 + # via rtfde +lxml==5.2.2 + # via unstructured +markdown==3.6 + # via open-webui +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 + # via jinja2 + # via werkzeug +marshmallow==3.21.2 + # via dataclasses-json + # via unstructured-client +mdurl==0.1.2 + # via markdown-it-py +mmh3==4.1.0 + # via chromadb +monotonic==1.6 + # via posthog +mpmath==1.3.0 + # via sympy +msoffcrypto-tool==5.4.1 + # via oletools +multidict==6.0.5 + # via aiohttp + # via yarl +mypy-extensions==1.0.0 + # via black + # via typing-inspect + # via unstructured-client +networkx==3.3 + # via torch +nltk==3.8.1 + # via unstructured +numpy==1.26.4 + # via chroma-hnswlib + # via chromadb + # via ctranslate2 + # via langchain + # via langchain-chroma + # via langchain-community + # via onnxruntime + # via opencv-python + # via opencv-python-headless + # via pandas + # via rank-bm25 + # via rapidocr-onnxruntime + # via scikit-learn + # via scipy + # via sentence-transformers + # via shapely + # via transformers + # via unstructured +oauthlib==3.2.2 + # via kubernetes + # via requests-oauthlib +olefile==0.47 + # via extract-msg + # via msoffcrypto-tool + # via oletools +oletools==0.60.1 + # via pcodedmp + # via rtfde +onnxruntime==1.17.3 + # via chromadb + # via faster-whisper + # via rapidocr-onnxruntime +opencv-python==4.9.0.80 + # via rapidocr-onnxruntime +opencv-python-headless==4.9.0.80 + # via open-webui +openpyxl==3.1.2 + # via open-webui +opentelemetry-api==1.24.0 + # via chromadb + # via opentelemetry-exporter-otlp-proto-grpc + # via opentelemetry-instrumentation + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi + # via opentelemetry-sdk 
+opentelemetry-exporter-otlp-proto-common==1.24.0 + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-exporter-otlp-proto-grpc==1.24.0 + # via chromadb +opentelemetry-instrumentation==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-asgi==0.45b0 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-fastapi==0.45b0 + # via chromadb +opentelemetry-proto==1.24.0 + # via opentelemetry-exporter-otlp-proto-common + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-sdk==1.24.0 + # via chromadb + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-semantic-conventions==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi + # via opentelemetry-sdk +opentelemetry-util-http==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi +ordered-set==4.1.0 + # via deepdiff +orjson==3.10.3 + # via chromadb + # via duckduckgo-search + # via fastapi + # via langsmith +overrides==7.7.0 + # via chromadb +packaging==23.2 + # via black + # via build + # via huggingface-hub + # via langchain-core + # via langfuse + # via marshmallow + # via onnxruntime + # via transformers + # via unstructured-client +pandas==2.2.2 + # via open-webui +passlib==1.7.4 + # via open-webui +pathspec==0.12.1 + # via black +pcodedmp==1.2.6 + # via oletools +peewee==3.17.5 + # via open-webui + # via peewee-migrate +peewee-migrate==1.12.2 + # via open-webui +pillow==10.3.0 + # via fpdf2 + # via rapidocr-onnxruntime + # via sentence-transformers +platformdirs==4.2.1 + # via black +posthog==3.5.0 + # via chromadb +proto-plus==1.23.0 + # via google-ai-generativelanguage + # via google-api-core +protobuf==4.25.3 + # via google-ai-generativelanguage + # via google-api-core + # via google-generativeai + # via googleapis-common-protos + # via grpcio-status + # via onnxruntime + # via opentelemetry-proto + # via proto-plus +psycopg2-binary==2.9.9 + # via open-webui +pyasn1==0.6.0 + # via pyasn1-modules + # via python-jose + # via rsa +pyasn1-modules==0.4.0 + # via google-auth +pyclipper==1.3.0.post5 + # via rapidocr-onnxruntime +pycparser==2.22 + # via cffi +pydantic==2.7.1 + # via chromadb + # via fastapi + # via google-generativeai + # via langchain + # via langchain-core + # via langfuse + # via langsmith + # via open-webui +pydantic-core==2.18.2 + # via pydantic +pydub==0.25.1 + # via open-webui +pygments==2.18.0 + # via rich +pyjwt==2.8.0 + # via open-webui +pymysql==1.1.0 + # via open-webui +pypandoc==1.13 + # via open-webui +pyparsing==2.4.7 + # via httplib2 + # via oletools +pypdf==4.2.0 + # via open-webui + # via unstructured-client +pypika==0.48.9 + # via chromadb +pyproject-hooks==1.1.0 + # via build +pyreqwest-impersonate==0.4.7 + # via duckduckgo-search +python-dateutil==2.9.0.post0 + # via botocore + # via kubernetes + # via pandas + # via posthog + # via unstructured-client +python-dotenv==1.0.1 + # via uvicorn +python-engineio==4.9.0 + # via python-socketio +python-iso639==2024.4.27 + # via unstructured +python-jose==3.3.0 + # via open-webui +python-magic==0.4.27 + # via unstructured +python-multipart==0.0.9 + # via fastapi + # via open-webui +python-socketio==5.11.2 + # via open-webui +pytube==15.0.0 + # via open-webui +pytz==2024.1 + # via apscheduler + # via pandas +pyxlsb==1.0.10 + # via open-webui +pyyaml==6.0.1 + # via chromadb + # via ctranslate2 + # via huggingface-hub + # via kubernetes + # via langchain + # via 
langchain-community + # via langchain-core + # via rapidocr-onnxruntime + # via transformers + # via uvicorn +rank-bm25==0.2.2 + # via open-webui +rapidfuzz==3.9.0 + # via unstructured +rapidocr-onnxruntime==1.3.22 + # via open-webui +red-black-tree-mod==1.20 + # via extract-msg +regex==2024.5.10 + # via nltk + # via transformers +requests==2.32.2 + # via chromadb + # via google-api-core + # via huggingface-hub + # via kubernetes + # via langchain + # via langchain-community + # via langsmith + # via open-webui + # via posthog + # via requests-oauthlib + # via transformers + # via unstructured + # via unstructured-client + # via youtube-transcript-api +requests-oauthlib==2.0.0 + # via kubernetes +rich==13.7.1 + # via typer +rsa==4.9 + # via google-auth + # via python-jose +rtfde==0.1.1 + # via extract-msg +s3transfer==0.10.1 + # via boto3 +safetensors==0.4.3 + # via transformers +scikit-learn==1.4.2 + # via sentence-transformers +scipy==1.13.0 + # via scikit-learn + # via sentence-transformers +sentence-transformers==2.7.0 + # via open-webui +setuptools==69.5.1 + # via ctranslate2 + # via opentelemetry-instrumentation +shapely==2.0.4 + # via rapidocr-onnxruntime +shellingham==1.5.4 + # via typer +simple-websocket==1.0.0 + # via python-engineio +six==1.16.0 + # via apscheduler + # via ecdsa + # via kubernetes + # via langdetect + # via posthog + # via python-dateutil + # via rapidocr-onnxruntime + # via unstructured-client +sniffio==1.3.1 + # via anyio + # via httpx +soupsieve==2.5 + # via beautifulsoup4 +sqlalchemy==2.0.30 + # via langchain + # via langchain-community +starlette==0.37.2 + # via fastapi +sympy==1.12 + # via onnxruntime + # via torch +tabulate==0.9.0 + # via unstructured +tenacity==8.3.0 + # via chromadb + # via langchain + # via langchain-community + # via langchain-core +threadpoolctl==3.5.0 + # via scikit-learn +tokenizers==0.15.2 + # via chromadb + # via faster-whisper + # via transformers +torch==2.3.0 + # via sentence-transformers +tqdm==4.66.4 + # via chromadb + # via google-generativeai + # via huggingface-hub + # via nltk + # via sentence-transformers + # via transformers +transformers==4.39.3 + # via sentence-transformers +typer==0.12.3 + # via chromadb + # via fastapi-cli +typing-extensions==4.11.0 + # via chromadb + # via fastapi + # via google-generativeai + # via huggingface-hub + # via opentelemetry-sdk + # via pydantic + # via pydantic-core + # via sqlalchemy + # via torch + # via typer + # via typing-inspect + # via unstructured + # via unstructured-client +typing-inspect==0.9.0 + # via dataclasses-json + # via unstructured-client +tzdata==2024.1 + # via pandas +tzlocal==5.2 + # via apscheduler + # via extract-msg +ujson==5.10.0 + # via fastapi +unstructured==0.14.0 + # via open-webui +unstructured-client==0.22.0 + # via unstructured +uritemplate==4.1.1 + # via google-api-python-client +urllib3==2.2.1 + # via botocore + # via kubernetes + # via requests + # via unstructured-client +uvicorn==0.22.0 + # via chromadb + # via fastapi + # via open-webui +uvloop==0.19.0 + # via uvicorn +validators==0.28.1 + # via open-webui +watchfiles==0.21.0 + # via uvicorn +websocket-client==1.8.0 + # via kubernetes +websockets==12.0 + # via uvicorn +werkzeug==3.0.3 + # via flask +wrapt==1.16.0 + # via deprecated + # via langfuse + # via opentelemetry-instrumentation + # via unstructured +wsproto==1.2.0 + # via simple-websocket +xlrd==2.0.1 + # via open-webui +yarl==1.9.4 + # via aiohttp +youtube-transcript-api==0.6.2 + # via open-webui +zipp==3.18.1 + # via importlib-metadata 
diff --git a/requirements.lock b/requirements.lock new file mode 100644 index 0000000000000000000000000000000000000000..e56ad08f0decb5f59981be168623b2a64d9e31dc --- /dev/null +++ b/requirements.lock @@ -0,0 +1,684 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: false +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. +aiohttp==3.9.5 + # via langchain + # via langchain-community + # via open-webui +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.3.0 + # via httpx + # via starlette + # via watchfiles +apscheduler==3.10.4 + # via open-webui +argon2-cffi==23.1.0 + # via open-webui +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==23.2.0 + # via aiohttp +authlib==1.3.1 + # via open-webui +av==11.0.0 + # via faster-whisper +backoff==2.2.1 + # via langfuse + # via posthog + # via unstructured +bcrypt==4.1.3 + # via chromadb + # via open-webui + # via passlib +beautifulsoup4==4.12.3 + # via extract-msg + # via unstructured +bidict==0.23.1 + # via python-socketio +black==24.4.2 + # via open-webui +blinker==1.8.2 + # via flask +boto3==1.34.110 + # via open-webui +botocore==1.34.110 + # via boto3 + # via s3transfer +build==1.2.1 + # via chromadb +cachetools==5.3.3 + # via google-auth +certifi==2024.2.2 + # via httpcore + # via httpx + # via kubernetes + # via requests + # via unstructured-client +cffi==1.16.0 + # via argon2-cffi-bindings + # via cryptography +chardet==5.2.0 + # via unstructured +charset-normalizer==3.3.2 + # via requests + # via unstructured-client +chroma-hnswlib==0.7.3 + # via chromadb +chromadb==0.5.0 + # via langchain-chroma + # via open-webui +click==8.1.7 + # via black + # via duckduckgo-search + # via flask + # via nltk + # via peewee-migrate + # via typer + # via uvicorn +colorclass==2.2.2 + # via oletools +coloredlogs==15.0.1 + # via onnxruntime +compressed-rtf==1.0.6 + # via extract-msg +cryptography==42.0.7 + # via authlib + # via msoffcrypto-tool + # via pyjwt +ctranslate2==4.2.1 + # via faster-whisper +dataclasses-json==0.6.6 + # via langchain + # via langchain-community + # via unstructured + # via unstructured-client +deepdiff==7.0.1 + # via unstructured-client +defusedxml==0.7.1 + # via fpdf2 +deprecated==1.2.14 + # via opentelemetry-api + # via opentelemetry-exporter-otlp-proto-grpc +dnspython==2.6.1 + # via email-validator +docx2txt==0.8 + # via open-webui +duckduckgo-search==6.1.5 + # via open-webui +easygui==0.98.3 + # via oletools +ebcdic==1.1.1 + # via extract-msg +ecdsa==0.19.0 + # via python-jose +email-validator==2.1.1 + # via fastapi +emoji==2.11.1 + # via unstructured +et-xmlfile==1.1.0 + # via openpyxl +extract-msg==0.48.5 + # via open-webui +fake-useragent==1.5.1 + # via open-webui +fastapi==0.111.0 + # via chromadb + # via langchain-chroma + # via open-webui +fastapi-cli==0.0.4 + # via fastapi +faster-whisper==1.0.2 + # via open-webui +filelock==3.14.0 + # via huggingface-hub + # via torch + # via transformers +filetype==1.2.0 + # via unstructured +flask==3.0.3 + # via flask-cors + # via open-webui +flask-cors==4.0.1 + # via open-webui +flatbuffers==24.3.25 + # via onnxruntime +fonttools==4.51.0 + # via fpdf2 +fpdf2==2.7.9 + # via open-webui +frozenlist==1.4.1 + # via aiohttp + # via aiosignal +fsspec==2024.3.1 + # via huggingface-hub + # via torch +google-ai-generativelanguage==0.6.4 + # via 
google-generativeai +google-api-core==2.19.0 + # via google-ai-generativelanguage + # via google-api-python-client + # via google-generativeai +google-api-python-client==2.129.0 + # via google-generativeai +google-auth==2.29.0 + # via google-ai-generativelanguage + # via google-api-core + # via google-api-python-client + # via google-auth-httplib2 + # via google-generativeai + # via kubernetes +google-auth-httplib2==0.2.0 + # via google-api-python-client +google-generativeai==0.5.4 + # via open-webui +googleapis-common-protos==1.63.0 + # via google-api-core + # via grpcio-status + # via opentelemetry-exporter-otlp-proto-grpc +grpcio==1.63.0 + # via chromadb + # via google-api-core + # via grpcio-status + # via opentelemetry-exporter-otlp-proto-grpc +grpcio-status==1.62.2 + # via google-api-core +h11==0.14.0 + # via httpcore + # via uvicorn + # via wsproto +httpcore==1.0.5 + # via httpx +httplib2==0.22.0 + # via google-api-python-client + # via google-auth-httplib2 +httptools==0.6.1 + # via uvicorn +httpx==0.27.0 + # via fastapi + # via langfuse +huggingface-hub==0.23.0 + # via faster-whisper + # via sentence-transformers + # via tokenizers + # via transformers +humanfriendly==10.0 + # via coloredlogs +idna==3.7 + # via anyio + # via email-validator + # via httpx + # via langfuse + # via requests + # via unstructured-client + # via yarl +importlib-metadata==7.0.0 + # via opentelemetry-api +importlib-resources==6.4.0 + # via chromadb +itsdangerous==2.2.0 + # via flask +jinja2==3.1.4 + # via fastapi + # via flask + # via torch +jmespath==1.0.1 + # via boto3 + # via botocore +joblib==1.4.2 + # via nltk + # via scikit-learn +jsonpatch==1.33 + # via langchain-core +jsonpath-python==1.0.6 + # via unstructured-client +jsonpointer==2.4 + # via jsonpatch +kubernetes==29.0.0 + # via chromadb +langchain==0.2.0 + # via langchain-community + # via open-webui +langchain-chroma==0.1.1 + # via open-webui +langchain-community==0.2.0 + # via open-webui +langchain-core==0.2.1 + # via langchain + # via langchain-chroma + # via langchain-community + # via langchain-text-splitters +langchain-text-splitters==0.2.0 + # via langchain +langdetect==1.0.9 + # via unstructured +langfuse==2.33.0 + # via open-webui +langsmith==0.1.57 + # via langchain + # via langchain-community + # via langchain-core +lark==1.1.8 + # via rtfde +lxml==5.2.2 + # via unstructured +markdown==3.6 + # via open-webui +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 + # via jinja2 + # via werkzeug +marshmallow==3.21.2 + # via dataclasses-json + # via unstructured-client +mdurl==0.1.2 + # via markdown-it-py +mmh3==4.1.0 + # via chromadb +monotonic==1.6 + # via posthog +mpmath==1.3.0 + # via sympy +msoffcrypto-tool==5.4.1 + # via oletools +multidict==6.0.5 + # via aiohttp + # via yarl +mypy-extensions==1.0.0 + # via black + # via typing-inspect + # via unstructured-client +networkx==3.3 + # via torch +nltk==3.8.1 + # via unstructured +numpy==1.26.4 + # via chroma-hnswlib + # via chromadb + # via ctranslate2 + # via langchain + # via langchain-chroma + # via langchain-community + # via onnxruntime + # via opencv-python + # via opencv-python-headless + # via pandas + # via rank-bm25 + # via rapidocr-onnxruntime + # via scikit-learn + # via scipy + # via sentence-transformers + # via shapely + # via transformers + # via unstructured +oauthlib==3.2.2 + # via kubernetes + # via requests-oauthlib +olefile==0.47 + # via extract-msg + # via msoffcrypto-tool + # via oletools +oletools==0.60.1 + # via pcodedmp + # via rtfde +onnxruntime==1.17.3 + # 
via chromadb + # via faster-whisper + # via rapidocr-onnxruntime +opencv-python==4.9.0.80 + # via rapidocr-onnxruntime +opencv-python-headless==4.9.0.80 + # via open-webui +openpyxl==3.1.2 + # via open-webui +opentelemetry-api==1.24.0 + # via chromadb + # via opentelemetry-exporter-otlp-proto-grpc + # via opentelemetry-instrumentation + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi + # via opentelemetry-sdk +opentelemetry-exporter-otlp-proto-common==1.24.0 + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-exporter-otlp-proto-grpc==1.24.0 + # via chromadb +opentelemetry-instrumentation==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-asgi==0.45b0 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-fastapi==0.45b0 + # via chromadb +opentelemetry-proto==1.24.0 + # via opentelemetry-exporter-otlp-proto-common + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-sdk==1.24.0 + # via chromadb + # via opentelemetry-exporter-otlp-proto-grpc +opentelemetry-semantic-conventions==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi + # via opentelemetry-sdk +opentelemetry-util-http==0.45b0 + # via opentelemetry-instrumentation-asgi + # via opentelemetry-instrumentation-fastapi +ordered-set==4.1.0 + # via deepdiff +orjson==3.10.3 + # via chromadb + # via duckduckgo-search + # via fastapi + # via langsmith +overrides==7.7.0 + # via chromadb +packaging==23.2 + # via black + # via build + # via huggingface-hub + # via langchain-core + # via langfuse + # via marshmallow + # via onnxruntime + # via transformers + # via unstructured-client +pandas==2.2.2 + # via open-webui +passlib==1.7.4 + # via open-webui +pathspec==0.12.1 + # via black +pcodedmp==1.2.6 + # via oletools +peewee==3.17.5 + # via open-webui + # via peewee-migrate +peewee-migrate==1.12.2 + # via open-webui +pillow==10.3.0 + # via fpdf2 + # via rapidocr-onnxruntime + # via sentence-transformers +platformdirs==4.2.1 + # via black +posthog==3.5.0 + # via chromadb +proto-plus==1.23.0 + # via google-ai-generativelanguage + # via google-api-core +protobuf==4.25.3 + # via google-ai-generativelanguage + # via google-api-core + # via google-generativeai + # via googleapis-common-protos + # via grpcio-status + # via onnxruntime + # via opentelemetry-proto + # via proto-plus +psycopg2-binary==2.9.9 + # via open-webui +pyasn1==0.6.0 + # via pyasn1-modules + # via python-jose + # via rsa +pyasn1-modules==0.4.0 + # via google-auth +pyclipper==1.3.0.post5 + # via rapidocr-onnxruntime +pycparser==2.22 + # via cffi +pydantic==2.7.1 + # via chromadb + # via fastapi + # via google-generativeai + # via langchain + # via langchain-core + # via langfuse + # via langsmith + # via open-webui +pydantic-core==2.18.2 + # via pydantic +pydub==0.25.1 + # via open-webui +pygments==2.18.0 + # via rich +pyjwt==2.8.0 + # via open-webui +pymysql==1.1.0 + # via open-webui +pypandoc==1.13 + # via open-webui +pyparsing==2.4.7 + # via httplib2 + # via oletools +pypdf==4.2.0 + # via open-webui + # via unstructured-client +pypika==0.48.9 + # via chromadb +pyproject-hooks==1.1.0 + # via build +pyreqwest-impersonate==0.4.7 + # via duckduckgo-search +python-dateutil==2.9.0.post0 + # via botocore + # via kubernetes + # via pandas + # via posthog + # via unstructured-client +python-dotenv==1.0.1 + # via uvicorn +python-engineio==4.9.0 + # via python-socketio 
+python-iso639==2024.4.27 + # via unstructured +python-jose==3.3.0 + # via open-webui +python-magic==0.4.27 + # via unstructured +python-multipart==0.0.9 + # via fastapi + # via open-webui +python-socketio==5.11.2 + # via open-webui +pytube==15.0.0 + # via open-webui +pytz==2024.1 + # via apscheduler + # via pandas +pyxlsb==1.0.10 + # via open-webui +pyyaml==6.0.1 + # via chromadb + # via ctranslate2 + # via huggingface-hub + # via kubernetes + # via langchain + # via langchain-community + # via langchain-core + # via rapidocr-onnxruntime + # via transformers + # via uvicorn +rank-bm25==0.2.2 + # via open-webui +rapidfuzz==3.9.0 + # via unstructured +rapidocr-onnxruntime==1.3.22 + # via open-webui +red-black-tree-mod==1.20 + # via extract-msg +regex==2024.5.10 + # via nltk + # via transformers +requests==2.32.2 + # via chromadb + # via google-api-core + # via huggingface-hub + # via kubernetes + # via langchain + # via langchain-community + # via langsmith + # via open-webui + # via posthog + # via requests-oauthlib + # via transformers + # via unstructured + # via unstructured-client + # via youtube-transcript-api +requests-oauthlib==2.0.0 + # via kubernetes +rich==13.7.1 + # via typer +rsa==4.9 + # via google-auth + # via python-jose +rtfde==0.1.1 + # via extract-msg +s3transfer==0.10.1 + # via boto3 +safetensors==0.4.3 + # via transformers +scikit-learn==1.4.2 + # via sentence-transformers +scipy==1.13.0 + # via scikit-learn + # via sentence-transformers +sentence-transformers==2.7.0 + # via open-webui +setuptools==69.5.1 + # via ctranslate2 + # via opentelemetry-instrumentation +shapely==2.0.4 + # via rapidocr-onnxruntime +shellingham==1.5.4 + # via typer +simple-websocket==1.0.0 + # via python-engineio +six==1.16.0 + # via apscheduler + # via ecdsa + # via kubernetes + # via langdetect + # via posthog + # via python-dateutil + # via rapidocr-onnxruntime + # via unstructured-client +sniffio==1.3.1 + # via anyio + # via httpx +soupsieve==2.5 + # via beautifulsoup4 +sqlalchemy==2.0.30 + # via langchain + # via langchain-community +starlette==0.37.2 + # via fastapi +sympy==1.12 + # via onnxruntime + # via torch +tabulate==0.9.0 + # via unstructured +tenacity==8.3.0 + # via chromadb + # via langchain + # via langchain-community + # via langchain-core +threadpoolctl==3.5.0 + # via scikit-learn +tokenizers==0.15.2 + # via chromadb + # via faster-whisper + # via transformers +torch==2.3.0 + # via sentence-transformers +tqdm==4.66.4 + # via chromadb + # via google-generativeai + # via huggingface-hub + # via nltk + # via sentence-transformers + # via transformers +transformers==4.39.3 + # via sentence-transformers +typer==0.12.3 + # via chromadb + # via fastapi-cli +typing-extensions==4.11.0 + # via chromadb + # via fastapi + # via google-generativeai + # via huggingface-hub + # via opentelemetry-sdk + # via pydantic + # via pydantic-core + # via sqlalchemy + # via torch + # via typer + # via typing-inspect + # via unstructured + # via unstructured-client +typing-inspect==0.9.0 + # via dataclasses-json + # via unstructured-client +tzdata==2024.1 + # via pandas +tzlocal==5.2 + # via apscheduler + # via extract-msg +ujson==5.10.0 + # via fastapi +unstructured==0.14.0 + # via open-webui +unstructured-client==0.22.0 + # via unstructured +uritemplate==4.1.1 + # via google-api-python-client +urllib3==2.2.1 + # via botocore + # via kubernetes + # via requests + # via unstructured-client +uvicorn==0.22.0 + # via chromadb + # via fastapi + # via open-webui +uvloop==0.19.0 + # via uvicorn 
+validators==0.28.1 + # via open-webui +watchfiles==0.21.0 + # via uvicorn +websocket-client==1.8.0 + # via kubernetes +websockets==12.0 + # via uvicorn +werkzeug==3.0.3 + # via flask +wrapt==1.16.0 + # via deprecated + # via langfuse + # via opentelemetry-instrumentation + # via unstructured +wsproto==1.2.0 + # via simple-websocket +xlrd==2.0.1 + # via open-webui +yarl==1.9.4 + # via aiohttp +youtube-transcript-api==0.6.2 + # via open-webui +zipp==3.18.1 + # via importlib-metadata diff --git a/run-compose.sh b/run-compose.sh new file mode 100755 index 0000000000000000000000000000000000000000..21574e95997fd512bc243adf0fa77456e570010d --- /dev/null +++ b/run-compose.sh @@ -0,0 +1,241 @@ +#!/bin/bash + +# Define color and formatting codes +BOLD='\033[1m' +GREEN='\033[1;32m' +WHITE='\033[1;37m' +RED='\033[0;31m' +NC='\033[0m' # No Color +# Unicode character for tick mark +TICK='\u2713' + +# Detect GPU driver +get_gpu_driver() { + # Detect NVIDIA GPUs using lspci or nvidia-smi + if lspci | grep -i nvidia >/dev/null || nvidia-smi >/dev/null 2>&1; then + echo "nvidia" + return + fi + + # Detect AMD GPUs (including GCN architecture check for amdgpu vs radeon) + if lspci | grep -i amd >/dev/null; then + # List of known GCN and later architecture cards + # This is a simplified list, and in a real-world scenario, you'd want a more comprehensive one + local gcn_and_later=("Radeon HD 7000" "Radeon HD 8000" "Radeon R5" "Radeon R7" "Radeon R9" "Radeon RX") + + # Get GPU information + local gpu_info=$(lspci | grep -i 'vga.*amd') + + for model in "${gcn_and_later[@]}"; do + if echo "$gpu_info" | grep -iq "$model"; then + echo "amdgpu" + return + fi + done + + # Default to radeon if no GCN or later architecture is detected + echo "radeon" + return + fi + + # Detect Intel GPUs + if lspci | grep -i intel >/dev/null; then + echo "i915" + return + fi + + # If no known GPU is detected + echo "Unknown or unsupported GPU driver" + exit 1 +} + +# Function for rolling animation +show_loading() { + local spin='-\|/' + local i=0 + + printf " " + + while kill -0 $1 2>/dev/null; do + i=$(( (i+1) %4 )) + printf "\b${spin:$i:1}" + sleep .1 + done + + # Replace the spinner with a tick + printf "\b${GREEN}${TICK}${NC}" +} + +# Usage information +usage() { + echo "Usage: $0 [OPTIONS]" + echo "Options:" + echo " --enable-gpu[count=COUNT] Enable GPU support with the specified count." + echo " --enable-api[port=PORT] Enable API and expose it on the specified port." + echo " --webui[port=PORT] Set the port for the web user interface." + echo " --data[folder=PATH] Bind mount for ollama data folder (by default will create the 'ollama' volume)." + echo " --build Build the docker image before running the compose project." + echo " --drop Drop the compose project." + echo " -q, --quiet Run script in headless mode." + echo " -h, --help Show this help message." + echo "" + echo "Examples:" + echo " $0 --drop" + echo " $0 --enable-gpu[count=1]" + echo " $0 --enable-gpu[count=all]" + echo " $0 --enable-api[port=11435]" + echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]" + echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]" + echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data] --build" + echo "" + echo "This script configures and runs a docker-compose setup with optional GPU support, API exposure, and web UI configuration." 
+ echo "About the gpu to use, the script automatically detects it using the "lspci" command." + echo "In this case the gpu detected is: $(get_gpu_driver)" +} + +# Default values +gpu_count=1 +api_port=11435 +webui_port=3000 +headless=false +build_image=false +kill_compose=false + +# Function to extract value from the parameter +extract_value() { + echo "$1" | sed -E 's/.*\[.*=(.*)\].*/\1/; t; s/.*//' +} + +# Parse arguments +while [[ $# -gt 0 ]]; do + key="$1" + + case $key in + --enable-gpu*) + enable_gpu=true + value=$(extract_value "$key") + gpu_count=${value:-1} + ;; + --enable-api*) + enable_api=true + value=$(extract_value "$key") + api_port=${value:-11435} + ;; + --webui*) + value=$(extract_value "$key") + webui_port=${value:-3000} + ;; + --data*) + value=$(extract_value "$key") + data_dir=${value:-"./ollama-data"} + ;; + --drop) + kill_compose=true + ;; + --build) + build_image=true + ;; + -q|--quiet) + headless=true + ;; + -h|--help) + usage + exit + ;; + *) + # Unknown option + echo "Unknown option: $key" + usage + exit 1 + ;; + esac + shift # past argument or value +done + +if [[ $kill_compose == true ]]; then + docker compose down --remove-orphans + echo -e "${GREEN}${BOLD}Compose project dropped successfully.${NC}" + exit +else + DEFAULT_COMPOSE_COMMAND="docker compose -f docker-compose.yaml" + if [[ $enable_gpu == true ]]; then + # Validate and process command-line arguments + if [[ -n $gpu_count ]]; then + if ! [[ $gpu_count =~ ^([0-9]+|all)$ ]]; then + echo "Invalid GPU count: $gpu_count" + exit 1 + fi + echo "Enabling GPU with $gpu_count GPUs" + # Add your GPU allocation logic here + export OLLAMA_GPU_DRIVER=$(get_gpu_driver) + export OLLAMA_GPU_COUNT=$gpu_count # Set OLLAMA_GPU_COUNT environment variable + fi + DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.gpu.yaml" + fi + if [[ $enable_api == true ]]; then + DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.api.yaml" + if [[ -n $api_port ]]; then + export OLLAMA_WEBAPI_PORT=$api_port # Set OLLAMA_WEBAPI_PORT environment variable + fi + fi + if [[ -n $data_dir ]]; then + DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.data.yaml" + export OLLAMA_DATA_DIR=$data_dir # Set OLLAMA_DATA_DIR environment variable + fi + if [[ -n $webui_port ]]; then + export OPEN_WEBUI_PORT=$webui_port # Set OPEN_WEBUI_PORT environment variable + fi + DEFAULT_COMPOSE_COMMAND+=" up -d" + DEFAULT_COMPOSE_COMMAND+=" --remove-orphans" + DEFAULT_COMPOSE_COMMAND+=" --force-recreate" + if [[ $build_image == true ]]; then + DEFAULT_COMPOSE_COMMAND+=" --build" + fi +fi + +# Recap of environment variables +echo +echo -e "${WHITE}${BOLD}Current Setup:${NC}" +echo -e " ${GREEN}${BOLD}GPU Driver:${NC} ${OLLAMA_GPU_DRIVER:-Not Enabled}" +echo -e " ${GREEN}${BOLD}GPU Count:${NC} ${OLLAMA_GPU_COUNT:-Not Enabled}" +echo -e " ${GREEN}${BOLD}WebAPI Port:${NC} ${OLLAMA_WEBAPI_PORT:-Not Enabled}" +echo -e " ${GREEN}${BOLD}Data Folder:${NC} ${data_dir:-Using ollama volume}" +echo -e " ${GREEN}${BOLD}WebUI Port:${NC} $webui_port" +echo + +if [[ $headless == true ]]; then + echo -ne "${WHITE}${BOLD}Running in headless mode... ${NC}" + choice="y" +else + # Ask for user acceptance + echo -ne "${WHITE}${BOLD}Do you want to proceed with current setup? (Y/n): ${NC}" + read -n1 -s choice +fi + +echo + +if [[ $choice == "" || $choice == "y" ]]; then + # Execute the command with the current user + eval "$DEFAULT_COMPOSE_COMMAND" & + + # Capture the background process PID + PID=$! 
+ + # Display the loading animation + #show_loading $PID + + # Wait for the command to finish + wait $PID + + echo + # Check exit status + if [ $? -eq 0 ]; then + echo -e "${GREEN}${BOLD}Compose project started successfully.${NC}" + else + echo -e "${RED}${BOLD}There was an error starting the compose project.${NC}" + fi +else + echo "Aborted." +fi + +echo diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh new file mode 100644 index 0000000000000000000000000000000000000000..c2a025bea3fa88beab7c0e6640de682dc8169c02 --- /dev/null +++ b/run-ollama-docker.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +host_port=11434 +container_port=11434 + +read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu + +docker rm -f ollama || true +docker pull ollama/ollama:latest + +docker_args="-d -v ollama:/root/.ollama -p $host_port:$container_port --name ollama ollama/ollama" + +if [ "$use_gpu" = "y" ]; then + docker_args="--gpus=all $docker_args" +fi + +docker run $docker_args + +docker image prune -f \ No newline at end of file diff --git a/run.sh b/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..6793fe16271c0b56b9ad1cc409d533bfe1e6ca83 --- /dev/null +++ b/run.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +image_name="open-webui" +container_name="open-webui" +host_port=3000 +container_port=8080 + +docker build -t "$image_name" . +docker stop "$container_name" &>/dev/null || true +docker rm "$container_name" &>/dev/null || true + +docker run -d -p "$host_port":"$container_port" \ + --add-host=host.docker.internal:host-gateway \ + -v "${image_name}:/app/backend/data" \ + --name "$container_name" \ + --restart always \ + "$image_name" + +docker image prune -f diff --git a/scripts/prepare-pyodide.js b/scripts/prepare-pyodide.js new file mode 100644 index 0000000000000000000000000000000000000000..5aaac5927e7dbf5674bd3dc4839b9ed1548bf7b4 --- /dev/null +++ b/scripts/prepare-pyodide.js @@ -0,0 +1,85 @@ +const packages = [ + 'micropip', + 'packaging', + 'requests', + 'beautifulsoup4', + 'numpy', + 'pandas', + 'matplotlib', + 'scikit-learn', + 'scipy', + 'regex', + 'seaborn' +]; + +import { loadPyodide } from 'pyodide'; +import { writeFile, readFile, copyFile, readdir, rmdir } from 'fs/promises'; + +async function downloadPackages() { + console.log('Setting up pyodide + micropip'); + + let pyodide; + try { + pyodide = await loadPyodide({ + packageCacheDir: 'static/pyodide' + }); + } catch (err) { + console.error('Failed to load Pyodide:', err); + return; + } + + const packageJson = JSON.parse(await readFile('package.json')); + const pyodideVersion = packageJson.dependencies.pyodide.replace('^', ''); + + try { + const pyodidePackageJson = JSON.parse(await readFile('static/pyodide/package.json')); + const pyodidePackageVersion = pyodidePackageJson.version.replace('^', ''); + + if (pyodideVersion !== pyodidePackageVersion) { + console.log('Pyodide version mismatch, removing static/pyodide directory'); + await rmdir('static/pyodide', { recursive: true }); + } + } catch (e) { + console.log('Pyodide package not found, proceeding with download.'); + } + + try { + console.log('Loading micropip package'); + await pyodide.loadPackage('micropip'); + + const micropip = pyodide.pyimport('micropip'); + console.log('Downloading Pyodide packages:', packages); + + try { + for (const pkg of packages) { + console.log(`Installing package: ${pkg}`); + await micropip.install(pkg); + } + } catch (err) { + console.error('Package installation failed:', err); + return; + } + + console.log('Pyodide packages 
downloaded, freezing into lock file'); + + try { + const lockFile = await micropip.freeze(); + await writeFile('static/pyodide/pyodide-lock.json', lockFile); + } catch (err) { + console.error('Failed to write lock file:', err); + } + } catch (err) { + console.error('Failed to load or install micropip:', err); + } +} + +async function copyPyodide() { + console.log('Copying Pyodide files into static directory'); + // Copy all files from node_modules/pyodide to static/pyodide + for await (const entry of await readdir('node_modules/pyodide')) { + await copyFile(`node_modules/pyodide/${entry}`, `static/pyodide/${entry}`); + } +} + +await downloadPackages(); +await copyPyodide(); diff --git a/src/app.css b/src/app.css new file mode 100644 index 0000000000000000000000000000000000000000..c3388f1d36d692f921c97f1b85d6b3a71c534567 --- /dev/null +++ b/src/app.css @@ -0,0 +1,156 @@ +@font-face { + font-family: 'Inter'; + src: url('/assets/fonts/Inter-Variable.ttf'); + font-display: swap; +} + +@font-face { + font-family: 'Archivo'; + src: url('/assets/fonts/Archivo-Variable.ttf'); + font-display: swap; +} + +@font-face { + font-family: 'Mona Sans'; + src: url('/assets/fonts/Mona-Sans.woff2'); + font-display: swap; +} + +html { + word-break: break-word; +} + +code { + /* white-space-collapse: preserve !important; */ + overflow-x: auto; + width: auto; +} + +math { + margin-top: 1rem; +} + +.hljs { + @apply rounded-lg; +} + +.markdown a { + @apply underline; +} + +.font-primary { + font-family: 'Archivo', sans-serif; +} + +iframe { + @apply rounded-lg; +} + +ol > li { + counter-increment: list-number; + display: block; + margin-bottom: 0; + margin-top: 0; + min-height: 28px; +} + +.prose ol > li::before { + content: counters(list-number, '.') '.'; + padding-right: 0.5rem; + color: var(--tw-prose-counters); + font-weight: 400; +} + +li p { + display: inline; +} + +::-webkit-scrollbar-thumb { + --tw-border-opacity: 1; + background-color: rgba(217, 217, 227, 0.8); + border-color: rgba(255, 255, 255, var(--tw-border-opacity)); + border-radius: 9999px; + border-width: 1px; +} + +::-webkit-scrollbar { + height: 0.4rem; + width: 0.4rem; +} + +::-webkit-scrollbar-track { + background-color: transparent; + border-radius: 9999px; +} + +select { + background-image: url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3E%3Cpath stroke='%236B7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='m6 8 4 4 4-4'/%3E%3C/svg%3E"); + background-position: right 0.5rem center; + background-repeat: no-repeat; + background-size: 1.5em 1.5em; + padding-right: 2.5rem; + -webkit-print-color-adjust: exact; + print-color-adjust: exact; + /* for Firefox */ + -moz-appearance: none; + /* for Chrome */ + -webkit-appearance: none; +} + +.katex-mathml { + display: none; +} + +.scrollbar-hidden:active::-webkit-scrollbar-thumb, +.scrollbar-hidden:focus::-webkit-scrollbar-thumb, +.scrollbar-hidden:hover::-webkit-scrollbar-thumb { + visibility: visible; +} +.scrollbar-hidden::-webkit-scrollbar-thumb { + visibility: hidden; +} + +.scrollbar-hidden::-webkit-scrollbar-corner { + display: none; +} + +.scrollbar-none::-webkit-scrollbar { + display: none; /* for Chrome, Safari and Opera */ +} + +.scrollbar-none::-webkit-scrollbar-corner { + display: none; +} + +.scrollbar-none { + -ms-overflow-style: none; /* IE and Edge */ + scrollbar-width: none; /* Firefox */ +} + +input::-webkit-outer-spin-button, +input::-webkit-inner-spin-button { + /* display: none; <- Crashes 
Chrome on hover */ + -webkit-appearance: none; + margin: 0; /* <-- Apparently some margin are still there even though it's hidden */ +} + +input[type='number'] { + -moz-appearance: textfield; /* Firefox */ +} + +.cm-editor { + height: 100%; + width: 100%; +} + +.cm-scroller { + @apply scrollbar-hidden; +} + +.cm-editor.cm-focused { + outline: none; +} + +.tippy-box[data-theme~='dark'] { + @apply rounded-lg bg-gray-950 text-xs border border-gray-900 shadow-xl; +} diff --git a/src/app.d.ts b/src/app.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..f59b884c51ed3c31fc0738fd38d0d75b580df5e4 --- /dev/null +++ b/src/app.d.ts @@ -0,0 +1,12 @@ +// See https://kit.svelte.dev/docs/types#app +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface Platform {} + } +} + +export {}; diff --git a/src/app.html b/src/app.html new file mode 100644 index 0000000000000000000000000000000000000000..5d48e1d7e8532479cb3cd18f38d7871318b60571 --- /dev/null +++ b/src/app.html @@ -0,0 +1,219 @@ + + + + + + + + + + + + + + + Open WebUI + + %sveltekit.head% + + + +
%sveltekit.body%
+ + + + diff --git a/src/lib/apis/audio/index.ts b/src/lib/apis/audio/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9716c552a76a472185bb06f49097704ac9424e61 --- /dev/null +++ b/src/lib/apis/audio/index.ts @@ -0,0 +1,133 @@ +import { AUDIO_API_BASE_URL } from '$lib/constants'; + +export const getAudioConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${AUDIO_API_BASE_URL}/config`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type OpenAIConfigForm = { + url: string; + key: string; + model: string; + speaker: string; +}; + +export const updateAudioConfig = async (token: string, payload: OpenAIConfigForm) => { + let error = null; + + const res = await fetch(`${AUDIO_API_BASE_URL}/config/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...payload + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const transcribeAudio = async (token: string, file: File) => { + const data = new FormData(); + data.append('file', file); + + let error = null; + const res = await fetch(`${AUDIO_API_BASE_URL}/transcriptions`, { + method: 'POST', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + }, + body: data + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const synthesizeOpenAISpeech = async ( + token: string = '', + speaker: string = 'alloy', + text: string = '', + model?: string +) => { + let error = null; + + const res = await fetch(`${AUDIO_API_BASE_URL}/speech`, { + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + input: text, + voice: speaker, + ...(model && { model }) + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res; + }) + .catch((err) => { + error = err.detail; + console.log(err); + + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/auths/index.ts b/src/lib/apis/auths/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..1bdb74694447514c724257c2c7ac6860d79e05e7 --- /dev/null +++ b/src/lib/apis/auths/index.ts @@ -0,0 +1,525 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const getAdminDetails = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/admin/details`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAdminConfig = async (token: string) => { + let error = 
null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/admin/config`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateAdminConfig = async (token: string, body: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/admin/config`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify(body) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getSessionUser = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + credentials: 'include' + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const userSignIn = async (email: string, password: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signin`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + credentials: 'include', + body: JSON.stringify({ + email: email, + password: password + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const userSignUp = async ( + name: string, + email: string, + password: string, + profile_image_url: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + credentials: 'include', + body: JSON.stringify({ + name: name, + email: email, + password: password, + profile_image_url: profile_image_url + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const addUser = async ( + token: string, + name: string, + email: string, + password: string, + role: string = 'pending' +) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/add`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + name: name, + email: email, + password: password, + role: role + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserProfile = async (token: string, name: string, profileImageUrl: string) => { + let error = null; + + const res = await 
fetch(`${WEBUI_API_BASE_URL}/auths/update/profile`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + name: name, + profile_image_url: profileImageUrl + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserPassword = async (token: string, password: string, newPassword: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/update/password`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + password: password, + new_password: newPassword + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getSignUpEnabledStatus = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/enabled`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getDefaultUserRole = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/user/role`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateDefaultUserRole = async (token: string, role: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/user/role`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + role: role + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const toggleSignUpEnabledStatus = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/enabled/toggle`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getJWTExpiresDuration = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/token/expires`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await 
res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateJWTExpiresDuration = async (token: string, duration: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/token/expires/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + duration: duration + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const createAPIKey = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/api_key`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + if (error) { + throw error; + } + return res.api_key; +}; + +export const getAPIKey = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/api_key`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + if (error) { + throw error; + } + return res.api_key; +}; + +export const deleteAPIKey = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/auths/api_key`, { + method: 'DELETE', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + if (error) { + throw error; + } + return res; +}; diff --git a/src/lib/apis/chats/index.ts b/src/lib/apis/chats/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..b046f1b10d5300fdd2babf12afde528c9ac9f4ed --- /dev/null +++ b/src/lib/apis/chats/index.ts @@ -0,0 +1,757 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; +import { getTimeRange } from '$lib/utils'; + +export const createNewChat = async (token: string, chat: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/new`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + chat: chat + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getChatList = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => 
{ + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res.map((chat) => ({ + ...chat, + time_range: getTimeRange(chat.updated_at) + })); +}; + +export const getChatListByUserId = async (token: string = '', userId: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/list/user/${userId}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res.map((chat) => ({ + ...chat, + time_range: getTimeRange(chat.updated_at) + })); +}; + +export const getArchivedChatList = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/archived`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAllChats = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/all`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAllArchivedChats = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/all/archived`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAllUserChats = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/all/db`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAllChatTags = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/tags/all`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + 
if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getChatListByTagName = async (token: string = '', tagName: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/tags`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + name: tagName + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res.map((chat) => ({ + ...chat, + time_range: getTimeRange(chat.updated_at) + })); +}; + +export const getChatById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getChatByShareId = async (token: string, share_id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/share/${share_id}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const cloneChatById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/clone`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const shareChatById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/share`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const archiveChatById = async (token: string, id: string) => { + let error = null; + + const res = await 
fetch(`${WEBUI_API_BASE_URL}/chats/${id}/archive`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteSharedChatById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/share`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateChatById = async (token: string, id: string, chat: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + chat: chat + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteChatById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getTagsById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const addTagById = async (token: string, id: string, tagName: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + tag_name: tagName, + chat_id: id + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return 
res; +}; + +export const deleteTagById = async (token: string, id: string, tagName: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + tag_name: tagName, + chat_id: id + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; +export const deleteTagsById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags/all`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteAllChats = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const archiveAllChats = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/chats/archive/all`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/configs/index.ts b/src/lib/apis/configs/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f53c53c897ebd48cc117d234dbbb80fa5c99735 --- /dev/null +++ b/src/lib/apis/configs/index.ts @@ -0,0 +1,119 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; +import type { Banner } from '$lib/types'; + +export const setDefaultModels = async (token: string, models: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/configs/default/models`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + models: models + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const setDefaultPromptSuggestions = async (token: string, promptSuggestions: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/configs/default/suggestions`, 
{ + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + suggestions: promptSuggestions + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getBanners = async (token: string): Promise<Banner[]> => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const setBanners = async (token: string, banners: Banner[]) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + banners: banners + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/documents/index.ts b/src/lib/apis/documents/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9d42feb19f6375985278e866ef6ed9f7138223ef --- /dev/null +++ b/src/lib/apis/documents/index.ts @@ -0,0 +1,232 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const createNewDoc = async ( + token: string, + collection_name: string, + filename: string, + name: string, + title: string, + content: object | null = null +) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/create`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + collection_name: collection_name, + filename: filename, + name: name, + title: title, + ...(content ? 
{ content: JSON.stringify(content) } : {}) + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getDocs = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getDocByName = async (token: string, name: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('name', name); + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/docs?${searchParams.toString()}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type DocUpdateForm = { + name: string; + title: string; +}; + +export const updateDocByName = async (token: string, name: string, form: DocUpdateForm) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('name', name); + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/doc/update?${searchParams.toString()}`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: form.name, + title: form.title + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type TagDocForm = { + name: string; + tags: string[]; +}; + +export const tagDocByName = async (token: string, name: string, form: TagDocForm) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('name', name); + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/doc/tags?${searchParams.toString()}`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: form.name, + tags: form.tags + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteDocByName = async (token: string, name: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('name', name); + + const res = await fetch(`${WEBUI_API_BASE_URL}/documents/doc/delete?${searchParams.toString()}`, { + method: 'DELETE', + headers: { + Accept: 
'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/files/index.ts b/src/lib/apis/files/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..630a9e7c55675e5c27a979564719bd0fc3c3d5c7 --- /dev/null +++ b/src/lib/apis/files/index.ts @@ -0,0 +1,183 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const uploadFile = async (token: string, file: File) => { + const data = new FormData(); + data.append('file', file); + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/`, { + method: 'POST', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + }, + body: data + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFiles = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFileById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/${id}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFileContentById = async (id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/${id}/content`, { + method: 'GET', + headers: { + Accept: 'application/json' + }, + credentials: 'include' + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return await res.blob(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteFileById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/${id}`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteAllFiles = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/files/all`, 
{ + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/functions/index.ts b/src/lib/apis/functions/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ed3306b3214967e98dee60ab0cf14042bc0bf141 --- /dev/null +++ b/src/lib/apis/functions/index.ts @@ -0,0 +1,455 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const createNewFunction = async (token: string, func: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/create`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...func + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFunctions = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const exportFunctions = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/export`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFunctionById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateFunctionById = async (token: string, id: string, func: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...func + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = 
err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteFunctionById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/delete`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const toggleFunctionById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/toggle`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const toggleGlobalById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/toggle/global`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFunctionValvesById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getFunctionValvesSpecById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves/spec`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateFunctionValvesById = async (token: string, id: string, valves: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...valves + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return 
res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserValvesById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves/user`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserValvesSpecById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves/user/spec`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserValvesById = async (token: string, id: string, valves: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/functions/id/${id}/valves/user/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...valves + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/images/index.ts b/src/lib/apis/images/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..3f624704eb4767683f3be50217ac54fa34e880fb --- /dev/null +++ b/src/lib/apis/images/index.ts @@ -0,0 +1,474 @@ +import { IMAGES_API_BASE_URL } from '$lib/constants'; + +export const getImageGenerationConfig = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/config`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateImageGenerationConfig = async ( + token: string = '', + engine: string, + enabled: boolean +) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/config/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + engine, + enabled + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if 
('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getOpenAIConfig = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/openai/config`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateOpenAIConfig = async (token: string = '', url: string, key: string) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/openai/config/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + url: url, + key: key + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getImageGenerationEngineUrls = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/url`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateImageGenerationEngineUrls = async (token: string = '', urls: object = {}) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/url/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + ...urls + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getImageSize = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/size`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.IMAGE_SIZE; +}; + +export const updateImageSize = async (token: string = '', size: string) => { + let error = null; 
+ + const res = await fetch(`${IMAGES_API_BASE_URL}/size/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + size: size + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.IMAGE_SIZE; +}; + +export const getImageSteps = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/steps`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.IMAGE_STEPS; +}; + +export const updateImageSteps = async (token: string = '', steps: number) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/steps/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ steps }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.IMAGE_STEPS; +}; + +export const getImageGenerationModels = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/models`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getDefaultImageGenerationModel = async (token: string = '') => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/models/default`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.model; +}; + +export const updateDefaultImageGenerationModel = async (token: string = '', model: string) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/models/default/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + 
model: model + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.model; +}; + +export const imageGenerations = async (token: string = '', prompt: string) => { + let error = null; + + const res = await fetch(`${IMAGES_API_BASE_URL}/generations`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + prompt: prompt + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/index.ts b/src/lib/apis/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..c2e90855b677ab488cefa7d8e4812e20878ee6c1 --- /dev/null +++ b/src/lib/apis/index.ts @@ -0,0 +1,949 @@ +import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants'; + +export const getModels = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/models`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + let models = res?.data ?? 
[]; + + models = models + .filter((models) => models) + // Sort the models + .sort((a, b) => { + // Check if models have position property + const aHasPosition = a.info?.meta?.position !== undefined; + const bHasPosition = b.info?.meta?.position !== undefined; + + // If both a and b have the position property + if (aHasPosition && bHasPosition) { + return a.info.meta.position - b.info.meta.position; + } + + // If only a has the position property, it should come first + if (aHasPosition) return -1; + + // If only b has the position property, it should come first + if (bHasPosition) return 1; + + // Compare case-insensitively by name for models without position property + const lowerA = a.name.toLowerCase(); + const lowerB = b.name.toLowerCase(); + + if (lowerA < lowerB) return -1; + if (lowerA > lowerB) return 1; + + // If same case-insensitively, sort by original strings, + // lowercase will come before uppercase due to ASCII values + if (a.name < b.name) return -1; + if (a.name > b.name) return 1; + + return 0; // They are equal + }); + + console.log(models); + return models; +}; + +type ChatCompletedForm = { + model: string; + messages: string[]; + chat_id: string; +}; + +export const chatCompleted = async (token: string, body: ChatCompletedForm) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/chat/completed`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify(body) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type ChatActionForm = { + model: string; + messages: string[]; + chat_id: string; +}; + +export const chatAction = async (token: string, action_id: string, body: ChatActionForm) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/chat/actions/${action_id}`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify(body) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getTaskConfig = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/task/config`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateTaskConfig = async (token: string, config: object) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/task/config/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify(config) + }) + .then(async (res) => { + if (!res.ok) throw await 
res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const generateTitle = async ( + token: string = '', + model: string, + prompt: string, + chat_id?: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/task/title/completions`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: prompt, + ...(chat_id && { chat_id: chat_id }) + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? 'New Chat'; +}; + +export const generateEmoji = async ( + token: string = '', + model: string, + prompt: string, + chat_id?: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/task/emoji/completions`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: prompt, + ...(chat_id && { chat_id: chat_id }) + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + const response = res?.choices[0]?.message?.content.replace(/["']/g, '') ?? null; + + if (response) { + if (/\p{Extended_Pictographic}/u.test(response)) { + return response.match(/\p{Extended_Pictographic}/gu)[0]; + } + } + + return null; +}; + +export const generateSearchQuery = async ( + token: string = '', + model: string, + messages: object[], + prompt: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/task/query/completions`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + messages: messages, + prompt: prompt + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? prompt; +}; + +export const getPipelinesList = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/pipelines/list`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + let pipelines = res?.data ?? 
[]; + return pipelines; +}; + +export const uploadPipeline = async (token: string, file: File, urlIdx: string) => { + let error = null; + + // Create a new FormData object to handle the file upload + const formData = new FormData(); + formData.append('file', file); + formData.append('urlIdx', urlIdx); + + const res = await fetch(`${WEBUI_BASE_URL}/api/pipelines/upload`, { + method: 'POST', + headers: { + ...(token && { authorization: `Bearer ${token}` }) + // 'Content-Type': 'multipart/form-data' is not needed as Fetch API will set it automatically + }, + body: formData + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const downloadPipeline = async (token: string, url: string, urlIdx: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/pipelines/add`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + url: url, + urlIdx: urlIdx + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deletePipeline = async (token: string, id: string, urlIdx: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/pipelines/delete`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + id: id, + urlIdx: urlIdx + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getPipelines = async (token: string, urlIdx?: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + if (urlIdx !== undefined) { + searchParams.append('urlIdx', urlIdx); + } + + const res = await fetch(`${WEBUI_BASE_URL}/api/pipelines?${searchParams.toString()}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + let pipelines = res?.data ?? 
[]; + return pipelines; +}; + +export const getPipelineValves = async (token: string, pipeline_id: string, urlIdx: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + if (urlIdx !== undefined) { + searchParams.append('urlIdx', urlIdx); + } + + const res = await fetch( + `${WEBUI_BASE_URL}/api/pipelines/${pipeline_id}/valves?${searchParams.toString()}`, + { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + } + ) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getPipelineValvesSpec = async (token: string, pipeline_id: string, urlIdx: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + if (urlIdx !== undefined) { + searchParams.append('urlIdx', urlIdx); + } + + const res = await fetch( + `${WEBUI_BASE_URL}/api/pipelines/${pipeline_id}/valves/spec?${searchParams.toString()}`, + { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + } + ) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updatePipelineValves = async ( + token: string = '', + pipeline_id: string, + valves: object, + urlIdx: string +) => { + let error = null; + + const searchParams = new URLSearchParams(); + if (urlIdx !== undefined) { + searchParams.append('urlIdx', urlIdx); + } + + const res = await fetch( + `${WEBUI_BASE_URL}/api/pipelines/${pipeline_id}/valves/update?${searchParams.toString()}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify(valves) + } + ) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + + if ('detail' in err) { + error = err.detail; + } else { + error = err; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getBackendConfig = async () => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/config`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json' + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getChangelog = async () => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/changelog`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json' + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getVersionUpdates = async () => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/version/updates`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json' + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); 
+ return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getModelFilterConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/config/model/filter`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateModelFilterConfig = async ( + token: string, + enabled: boolean, + models: string[] +) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/config/model/filter`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + enabled: enabled, + models: models + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getWebhookUrl = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/webhook`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res.url; +}; + +export const updateWebhookUrl = async (token: string, url: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/webhook`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + url: url + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res.url; +}; + +export const getCommunitySharingEnabledStatus = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const toggleCommunitySharingEnabledStatus = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing/toggle`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getModelConfig = async (token: string): Promise => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: 
`Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res.models; +}; + +export interface ModelConfig { + id: string; + name: string; + meta: ModelMeta; + base_model_id?: string; + params: ModelParams; +} + +export interface ModelMeta { + description?: string; + capabilities?: object; +} + +export interface ModelParams {} + +export type GlobalModelConfig = ModelConfig[]; + +export const updateModelConfig = async (token: string, config: GlobalModelConfig) => { + let error = null; + + const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + models: config + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/memories/index.ts b/src/lib/apis/memories/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..c3c122adf8be662aa20d2914b067b5675dc1ecd9 --- /dev/null +++ b/src/lib/apis/memories/index.ts @@ -0,0 +1,186 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const getMemories = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const addNewMemory = async (token: string, content: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/add`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + content: content + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateMemoryById = async (token: string, id: string, content: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/${id}/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + content: content + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const queryMemory = async (token: string, content: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/query`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + content: content + }) + }) + .then(async (res) => { + if (!res.ok) throw await 
res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteMemoryById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/${id}`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteMemoriesByUserId = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/memories/user`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/models/index.ts b/src/lib/apis/models/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9faa358d33100d3193b1a9200f361fd835d30098 --- /dev/null +++ b/src/lib/apis/models/index.ts @@ -0,0 +1,167 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const addNewModel = async (token: string, model: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/models/add`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify(model) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getModelInfos = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/models`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getModelById = async (token: string, id: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('id', id); + + const res = await fetch(`${WEBUI_API_BASE_URL}/models?${searchParams.toString()}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateModelById = async (token: string, id: string, model: object) => { + let error = null; + + const searchParams = new 
URLSearchParams(); + searchParams.append('id', id); + + const res = await fetch(`${WEBUI_API_BASE_URL}/models/update?${searchParams.toString()}`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify(model) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteModelById = async (token: string, id: string) => { + let error = null; + + const searchParams = new URLSearchParams(); + searchParams.append('id', id); + + const res = await fetch(`${WEBUI_API_BASE_URL}/models/delete?${searchParams.toString()}`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..084d2d5f18a54dc11d73da5beba5c2dd93f700c5 --- /dev/null +++ b/src/lib/apis/ollama/index.ts @@ -0,0 +1,558 @@ +import { OLLAMA_API_BASE_URL } from '$lib/constants'; +import { titleGenerationTemplate } from '$lib/utils'; + +export const getOllamaConfig = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/config`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateOllamaConfig = async (token: string = '', enable_ollama_api: boolean) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/config/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + enable_ollama_api: enable_ollama_api + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getOllamaUrls = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/urls`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; 
+ } + + return res.OLLAMA_BASE_URLS; +}; + +export const updateOllamaUrls = async (token: string = '', urls: string[]) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/urls/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + urls: urls + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OLLAMA_BASE_URLS; +}; + +export const getOllamaVersion = async (token: string, urlIdx?: number) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/version${urlIdx ? `/${urlIdx}` : ''}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res?.version ?? false; +}; + +export const getOllamaModels = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/tags`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return (res?.models ?? []) + .map((model) => ({ id: model.model, name: model.name ?? model.model, ...model })) + .sort((a, b) => { + return a.name.localeCompare(b.name); + }); +}; + +// TODO: migrate to backend +export const generateTitle = async ( + token: string = '', + template: string, + model: string, + prompt: string +) => { + let error = null; + + template = titleGenerationTemplate(template, prompt); + + console.log(template); + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: template, + stream: false, + options: { + // Restrict the number of tokens generated to 50 + num_predict: 50 + } + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res?.response.replace(/["']/g, '') ?? 
'New Chat'; +}; + +export const generatePrompt = async (token: string = '', model: string, conversation: string) => { + let error = null; + + if (conversation === '') { + conversation = '[no existing conversation]'; + } + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: `Conversation: + ${conversation} + + As USER in the conversation above, your task is to continue the conversation. Remember, Your responses should be crafted as if you're a human conversing in a natural, realistic manner, keeping in mind the context and flow of the dialogue. Please generate a fitting response to the last message in the conversation, or if there is no existing conversation, initiate one as a normal person would. + + Response: + ` + }) + }).catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const generateEmbeddings = async (token: string = '', model: string, text: string) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/embeddings`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: text + }) + }).catch((err) => { + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const generateTextCompletion = async (token: string = '', model: string, text: string) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + prompt: text, + stream: true + }) + }).catch((err) => { + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const generateChatCompletion = async (token: string = '', body: object) => { + let controller = new AbortController(); + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/chat`, { + signal: controller.signal, + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify(body) + }).catch((err) => { + error = err; + return null; + }); + + if (error) { + throw error; + } + + return [res, controller]; +}; + +export const createModel = async ( + token: string, + tagName: string, + content: string, + urlIdx: string | null = null +) => { + let error = null; + + const res = await fetch( + `${OLLAMA_API_BASE_URL}/api/create${urlIdx !== null ? `/${urlIdx}` : ''}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: tagName, + modelfile: content + }) + } + ).catch((err) => { + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteModel = async (token: string, tagName: string, urlIdx: string | null = null) => { + let error = null; + + const res = await fetch( + `${OLLAMA_API_BASE_URL}/api/delete${urlIdx !== null ? 
`/${urlIdx}` : ''}`, + { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: tagName + }) + } + ) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + console.log(json); + return true; + }) + .catch((err) => { + console.log(err); + error = err; + + if ('detail' in err) { + error = err.detail; + } + + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const pullModel = async (token: string, tagName: string, urlIdx: string | null = null) => { + let error = null; + const controller = new AbortController(); + + const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull${urlIdx !== null ? `/${urlIdx}` : ''}`, { + signal: controller.signal, + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: tagName + }) + }).catch((err) => { + console.log(err); + error = err; + + if ('detail' in err) { + error = err.detail; + } + + return null; + }); + if (error) { + throw error; + } + return [res, controller]; +}; + +export const downloadModel = async ( + token: string, + download_url: string, + urlIdx: string | null = null +) => { + let error = null; + + const res = await fetch( + `${OLLAMA_API_BASE_URL}/models/download${urlIdx !== null ? `/${urlIdx}` : ''}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + url: download_url + }) + } + ).catch((err) => { + console.log(err); + error = err; + + if ('detail' in err) { + error = err.detail; + } + + return null; + }); + if (error) { + throw error; + } + return res; +}; + +export const uploadModel = async (token: string, file: File, urlIdx: string | null = null) => { + let error = null; + + const formData = new FormData(); + formData.append('file', file); + + const res = await fetch( + `${OLLAMA_API_BASE_URL}/models/upload${urlIdx !== null ? 
`/${urlIdx}` : ''}`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${token}` + }, + body: formData + } + ).catch((err) => { + console.log(err); + error = err; + + if ('detail' in err) { + error = err.detail; + } + + return null; + }); + if (error) { + throw error; + } + return res; +}; + +// export const pullModel = async (token: string, tagName: string) => { +// return await fetch(`${OLLAMA_API_BASE_URL}/pull`, { +// method: 'POST', +// headers: { +// 'Content-Type': 'text/event-stream', +// Authorization: `Bearer ${token}` +// }, +// body: JSON.stringify({ +// name: tagName +// }) +// }); +// }; diff --git a/src/lib/apis/openai/index.ts b/src/lib/apis/openai/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a52ebb3209ef80577732718646c4fe17759ef72 --- /dev/null +++ b/src/lib/apis/openai/index.ts @@ -0,0 +1,455 @@ +import { OPENAI_API_BASE_URL } from '$lib/constants'; +import { titleGenerationTemplate } from '$lib/utils'; +import { type Model, models, settings } from '$lib/stores'; + +export const getOpenAIConfig = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/config`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + enable_openai_api: enable_openai_api + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getOpenAIUrls = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/urls`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OPENAI_API_BASE_URLS; +}; + +export const updateOpenAIUrls = async (token: string = '', urls: string[]) => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/urls/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + urls: urls + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = 
err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OPENAI_API_BASE_URLS; +}; + +export const getOpenAIKeys = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/keys`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OPENAI_API_KEYS; +}; + +export const updateOpenAIKeys = async (token: string = '', keys: string[]) => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/keys/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + keys: keys + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OPENAI_API_KEYS; +}; + +export const getOpenAIModels = async (token: string, urlIdx?: number) => { + let error = null; + + const res = await fetch( + `${OPENAI_API_BASE_URL}/models${typeof urlIdx === 'number' ? `/${urlIdx}` : ''}`, + { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + } + ) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`; + return []; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getOpenAIModelsDirect = async ( + base_url: string = 'https://api.openai.com/v1', + api_key: string = '' +) => { + let error = null; + + const res = await fetch(`${base_url}/models`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${api_key}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`; + return null; + }); + + if (error) { + throw error; + } + + const models = Array.isArray(res) ? res : res?.data ?? null; + + return models + .map((model) => ({ id: model.id, name: model.name ?? model.id, external: true })) + .filter((model) => (base_url.includes('openai') ? 
model.name.includes('gpt') : true)) + .sort((a, b) => { + return a.name.localeCompare(b.name); + }); +}; + +export const generateOpenAIChatCompletion = async ( + token: string = '', + body: object, + url: string = OPENAI_API_BASE_URL +): Promise<[Response | null, AbortController]> => { + const controller = new AbortController(); + let error = null; + + const res = await fetch(`${url}/chat/completions`, { + signal: controller.signal, + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify(body) + }).catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return [res, controller]; +}; + +export const synthesizeOpenAISpeech = async ( + token: string = '', + speaker: string = 'alloy', + text: string = '', + model: string = 'tts-1' +) => { + let error = null; + + const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, { + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + model: model, + input: text, + voice: speaker + }) + }).catch((err) => { + console.log(err); + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const generateTitle = async ( + token: string = '', + template: string, + model: string, + prompt: string, + chat_id?: string, + url: string = OPENAI_API_BASE_URL +) => { + let error = null; + + template = titleGenerationTemplate(template, prompt); + + console.log(template); + + const res = await fetch(`${url}/chat/completions`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + messages: [ + { + role: 'user', + content: template + } + ], + stream: false, + // Restricting the max tokens to 50 to avoid long titles + max_tokens: 50, + ...(chat_id && { chat_id: chat_id }), + title: true + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? 'New Chat'; +}; + +export const generateSearchQuery = async ( + token: string = '', + model: string, + previousMessages: string[], + prompt: string, + url: string = OPENAI_API_BASE_URL +): Promise => { + let error = null; + + // TODO: Allow users to specify the prompt + // Get the current date in the format "January 20, 2024" + const currentDate = new Intl.DateTimeFormat('en-US', { + year: 'numeric', + month: 'long', + day: '2-digit' + }).format(new Date()); + + const res = await fetch(`${url}/chat/completions`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + model: model, + // Few shot prompting + messages: [ + { + role: 'assistant', + content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.` + }, + { + role: 'user', + content: prompt + } + // { + // role: 'user', + // content: + // (previousMessages.length > 0 + // ? 
`Previous Questions:\n${previousMessages.join('\n')}\n\n` + // : '') + `Current Question: ${prompt}` + // } + ], + stream: false, + // Restricting the max tokens to 30 to avoid long search queries + max_tokens: 30 + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } + return undefined; + }); + + if (error) { + throw error; + } + + return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? undefined; +}; diff --git a/src/lib/apis/prompts/index.ts b/src/lib/apis/prompts/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ca9c7d543d7815340a667b74dc1b258f3ff496f8 --- /dev/null +++ b/src/lib/apis/prompts/index.ts @@ -0,0 +1,178 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const createNewPrompt = async ( + token: string, + command: string, + title: string, + content: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/create`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + command: `/${command}`, + title: title, + content: content + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getPrompts = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getPromptByCommand = async (token: string, command: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updatePromptByCommand = async ( + token: string, + command: string, + title: string, + content: string +) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + command: `/${command}`, + title: title, + content: content + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deletePromptByCommand = async (token: string, command: string) => { + let error = null; + + command = 
command.charAt(0) === '/' ? command.slice(1) : command; + + const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}/delete`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/rag/index.ts b/src/lib/apis/rag/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..5c0a47b35732da9d3378372b979ebcd0c289454d --- /dev/null +++ b/src/lib/apis/rag/index.ts @@ -0,0 +1,620 @@ +import { RAG_API_BASE_URL } from '$lib/constants'; + +export const getRAGConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/config`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type ChunkConfigForm = { + chunk_size: number; + chunk_overlap: number; +}; + +type ContentExtractConfigForm = { + engine: string; + tika_server_url: string | null; +}; + +type YoutubeConfigForm = { + language: string[]; + translation?: string | null; +}; + +type RAGConfigForm = { + pdf_extract_images?: boolean; + chunk?: ChunkConfigForm; + content_extraction?: ContentExtractConfigForm; + web_loader_ssl_verification?: boolean; + youtube?: YoutubeConfigForm; +}; + +export const updateRAGConfig = async (token: string, payload: RAGConfigForm) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/config/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...payload + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getRAGTemplate = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/template`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res?.template ?? 
''; +}; + +export const getQuerySettings = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/query/settings`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type QuerySettings = { + k: number | null; + r: number | null; + template: string | null; +}; + +export const updateQuerySettings = async (token: string, settings: QuerySettings) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/query/settings/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...settings + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const processDocToVectorDB = async (token: string, file_id: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/process/doc`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + file_id: file_id + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const uploadDocToVectorDB = async (token: string, collection_name: string, file: File) => { + const data = new FormData(); + data.append('file', file); + data.append('collection_name', collection_name); + + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/doc`, { + method: 'POST', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + }, + body: data + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const uploadWebToVectorDB = async (token: string, collection_name: string, url: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/web`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + url: url, + collection_name: collection_name + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const uploadYoutubeTranscriptionToVectorDB = async (token: string, url: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/youtube`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + url: url + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return 
null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const queryDoc = async ( + token: string, + collection_name: string, + query: string, + k: number | null = null +) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/query/doc`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + collection_name: collection_name, + query: query, + k: k + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const queryCollection = async ( + token: string, + collection_names: string, + query: string, + k: number | null = null +) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/query/collection`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + collection_names: collection_names, + query: query, + k: k + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const scanDocs = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/scan`, { + method: 'GET', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const resetUploadDir = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/reset/uploads`, { + method: 'GET', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const resetVectorDB = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/reset/db`, { + method: 'GET', + headers: { + Accept: 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getEmbeddingConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/embedding`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type OpenAIConfigForm = { + key: string; + url: string; + batch_size: number; +}; + +type EmbeddingModelUpdateForm = { + openai_config?: OpenAIConfigForm; + embedding_engine: string; + embedding_model: string; +}; + +export const updateEmbeddingConfig = async (token: string, payload: 
EmbeddingModelUpdateForm) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/embedding/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...payload + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getRerankingConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/reranking`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type RerankingModelUpdateForm = { + reranking_model: string; +}; + +export const updateRerankingConfig = async (token: string, payload: RerankingModelUpdateForm) => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/reranking/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...payload + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const runWebSearch = async ( + token: string, + query: string, + collection_name?: string +): Promise => { + let error = null; + + const res = await fetch(`${RAG_API_BASE_URL}/web/search`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + query, + collection_name: collection_name ?? 
'' + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export interface SearchDocument { + status: boolean; + collection_name: string; + filenames: string[]; +} diff --git a/src/lib/apis/streaming/index.ts b/src/lib/apis/streaming/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f91edad83cf9afffd371290a2a32722b1a626c30 --- /dev/null +++ b/src/lib/apis/streaming/index.ts @@ -0,0 +1,116 @@ +import { EventSourceParserStream } from 'eventsource-parser/stream'; +import type { ParsedEvent } from 'eventsource-parser'; + +type TextStreamUpdate = { + done: boolean; + value: string; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + citations?: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + error?: any; + usage?: ResponseUsage; +}; + +type ResponseUsage = { + /** Including images and tools if any */ + prompt_tokens: number; + /** The tokens generated */ + completion_tokens: number; + /** Sum of the above two fields */ + total_tokens: number; +}; + +// createOpenAITextStream takes a responseBody with an SSE response, +// and returns an async generator that emits delta updates with large deltas chunked into random sized chunks +export async function createOpenAITextStream( + responseBody: ReadableStream<Uint8Array>, + splitLargeDeltas: boolean +): Promise<AsyncGenerator<TextStreamUpdate>> { + const eventStream = responseBody + .pipeThrough(new TextDecoderStream()) + .pipeThrough(new EventSourceParserStream()) + .getReader(); + let iterator = openAIStreamToIterator(eventStream); + if (splitLargeDeltas) { + iterator = streamLargeDeltasAsRandomChunks(iterator); + } + return iterator; +} + +async function* openAIStreamToIterator( + reader: ReadableStreamDefaultReader<ParsedEvent> +): AsyncGenerator<TextStreamUpdate> { + while (true) { + const { value, done } = await reader.read(); + if (done) { + yield { done: true, value: '' }; + break; + } + if (!value) { + continue; + } + const data = value.data; + if (data.startsWith('[DONE]')) { + yield { done: true, value: '' }; + break; + } + + try { + const parsedData = JSON.parse(data); + console.log(parsedData); + + if (parsedData.error) { + yield { done: true, value: '', error: parsedData.error }; + break; + } + + if (parsedData.citations) { + yield { done: false, value: '', citations: parsedData.citations }; + continue; + } + + yield { + done: false, + value: parsedData.choices?.[0]?.delta?.content ?? 
'', + usage: parsedData.usage + }; + } catch (e) { + console.error('Error extracting delta from SSE event:', e); + } + } +} + +// streamLargeDeltasAsRandomChunks will chunk large deltas (length > 5) into random sized chunks between 1-3 characters +// This is to simulate a more fluid streaming, even though some providers may send large chunks of text at once +async function* streamLargeDeltasAsRandomChunks( + iterator: AsyncGenerator +): AsyncGenerator { + for await (const textStreamUpdate of iterator) { + if (textStreamUpdate.done) { + yield textStreamUpdate; + return; + } + if (textStreamUpdate.citations) { + yield textStreamUpdate; + continue; + } + let content = textStreamUpdate.value; + if (content.length < 5) { + yield { done: false, value: content }; + continue; + } + while (content != '') { + const chunkSize = Math.min(Math.floor(Math.random() * 3) + 1, content.length); + const chunk = content.slice(0, chunkSize); + yield { done: false, value: chunk }; + // Do not sleep if the tab is hidden + // Timers are throttled to 1s in hidden tabs + if (document?.visibilityState !== 'hidden') { + await sleep(5); + } + content = content.slice(chunkSize); + } + } +} + +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); diff --git a/src/lib/apis/tools/index.ts b/src/lib/apis/tools/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..28e8dde86cff79fd0b1b496a4992f22d366c39f5 --- /dev/null +++ b/src/lib/apis/tools/index.ts @@ -0,0 +1,391 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const createNewTool = async (token: string, tool: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/create`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...tool + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getTools = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const exportTools = async (token: string = '') => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/export`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getToolById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + 
.then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateToolById = async (token: string, id: string, tool: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...tool + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const deleteToolById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/delete`, { + method: 'DELETE', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getToolValvesById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getToolValvesSpecById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves/spec`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateToolValvesById = async (token: string, id: string, valves: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...valves + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserValvesById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves/user`, { + method: 'GET', + headers: { + Accept: 
'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserValvesSpecById = async (token: string, id: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves/user/spec`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserValvesById = async (token: string, id: string, valves: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/tools/id/${id}/valves/user/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...valves + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .then((json) => { + return json; + }) + .catch((err) => { + error = err.detail; + + console.log(err); + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/users/index.ts b/src/lib/apis/users/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..0b22b7171569eafff1b67a664efb6cfffe2c0968 --- /dev/null +++ b/src/lib/apis/users/index.ts @@ -0,0 +1,336 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; +import { getUserPosition } from '$lib/utils'; + +export const getUserPermissions = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/permissions/user`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserPermissions = async (token: string, permissions: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/permissions/user`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...permissions + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserRole = async (token: string, id: string, role: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/update/role`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + id: id, + role: role + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = 
err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUsers = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res ? res : []; +}; + +export const getUserSettings = async (token: string) => { + let error = null; + const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserSettings = async (token: string, settings: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...settings + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserById = async (token: string, userId: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getUserInfo = async (token: string) => { + let error = null; + const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/info`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const updateUserInfo = async (token: string, info: object) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/info/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + ...info + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const getAndUpdateUserLocation = async (token: string) => { + const location = await getUserPosition().catch((err) => { + throw err; + }); + + if (location) { + await updateUserInfo(token, { location: location }); + return location; + } else { + throw new Error('Failed to get user location'); + } +}; + +export const deleteUserById = async (token: 
string, userId: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}`, { + method: 'DELETE', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +type UserUpdateForm = { + profile_image_url: string; + email: string; + name: string; + password: string; +}; + +export const updateUserById = async (token: string, userId: string, user: UserUpdateForm) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}/update`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + profile_image_url: user.profile_image_url, + email: user.email, + name: user.name, + password: user.password !== '' ? user.password : undefined + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/apis/utils/index.ts b/src/lib/apis/utils/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f99ac564106ec83079097e1a6f4acc9a558f6080 --- /dev/null +++ b/src/lib/apis/utils/index.ts @@ -0,0 +1,179 @@ +import { WEBUI_API_BASE_URL } from '$lib/constants'; + +export const getGravatarUrl = async (email: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/utils/gravatar?email=${email}`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json' + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + return res; +}; + +export const formatPythonCode = async (code: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/utils/code/format`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + code: code + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + + error = err; + if (err.detail) { + error = err.detail; + } + return null; + }); + + if (error) { + throw error; + } + + return res; +}; + +export const downloadChatAsPDF = async (chat: object) => { + let error = null; + + const blob = await fetch(`${WEBUI_API_BASE_URL}/utils/pdf`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + title: chat.title, + messages: chat.messages + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.blob(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + return blob; +}; + +export const getHTMLFromMarkdown = async (md: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/utils/markdown`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + md: md + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + error = err; + return null; + }); + + return res.html; +}; + +export const downloadDatabase = 
async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/utils/db/download`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (response) => { + if (!response.ok) { + throw await response.json(); + } + return response.blob(); + }) + .then((blob) => { + const url = window.URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'webui.db'; + document.body.appendChild(a); + a.click(); + window.URL.revokeObjectURL(url); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } +}; + +export const downloadLiteLLMConfig = async (token: string) => { + let error = null; + + const res = await fetch(`${WEBUI_API_BASE_URL}/utils/litellm/config`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + } + }) + .then(async (response) => { + if (!response.ok) { + throw await response.json(); + } + return response.blob(); + }) + .then((blob) => { + const url = window.URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'config.yaml'; + document.body.appendChild(a); + a.click(); + window.URL.revokeObjectURL(url); + }) + .catch((err) => { + console.log(err); + error = err.detail; + return null; + }); + + if (error) { + throw error; + } +}; diff --git a/src/lib/components/AddFilesPlaceholder.svelte b/src/lib/components/AddFilesPlaceholder.svelte new file mode 100644 index 0000000000000000000000000000000000000000..3bdbe9281a07d19b03752397e4be2bb200fb90ff --- /dev/null +++ b/src/lib/components/AddFilesPlaceholder.svelte @@ -0,0 +1,13 @@ + + +
📄
+
{$i18n.t('Add Files')}
+ +
+ {$i18n.t('Drop any files here to add to the conversation')} +
+
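The streaming helper added above in src/lib/apis/streaming/index.ts is consumed as an async generator of delta updates. A minimal consumer sketch follows; createOpenAITextStream itself comes from this diff, but the caller, its names, and the assumption that `res` is a streaming fetch Response are illustrative only:

// Illustrative consumer for createOpenAITextStream; not part of this diff.
import { createOpenAITextStream } from '$lib/apis/streaming';

async function readAssistantReply(res: Response): Promise<string> {
	if (!res.body) {
		throw new Error('Response has no body to stream');
	}
	let text = '';
	// The second argument enables splitting large deltas into small random chunks.
	const stream = await createOpenAITextStream(res.body, true);
	for await (const update of stream) {
		if (update.error) {
			throw update.error;
		}
		if (update.done) {
			break;
		}
		text += update.value;
	}
	return text;
}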
diff --git a/src/lib/components/ChangelogModal.svelte b/src/lib/components/ChangelogModal.svelte new file mode 100644 index 0000000000000000000000000000000000000000..48156f92473315ed60d1b814ef73dc21ee95491d --- /dev/null +++ b/src/lib/components/ChangelogModal.svelte @@ -0,0 +1,117 @@ + + + +
+
+
+ {$i18n.t('What’s New in')} + {$WEBUI_NAME} + +
+ +
+
+
{$i18n.t('Release Notes')}
+
+
+ v{WEBUI_VERSION} +
+
+
+ +
+
+
+ {#if changelog} + {#each Object.keys(changelog) as version} +
+
+ v{version} - {changelog[version].date} +
+ +
+ + {#each Object.keys(changelog[version]).filter((section) => section !== 'date') as section} +
+
+ {section} +
+ +
+ {#each Object.keys(changelog[version][section]) as item} +
+
+ {changelog[version][section][item].title} +
+
{changelog[version][section][item].content}
+
+ {/each} +
+
+ {/each} +
+ {/each} + {/if} +
+
+
+ +
+
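The changelog modal above iterates a changelog object keyed by version, reads a date field, and treats every other key as a section of entries with a title and content. A rough sketch of the shape the template appears to expect; the field names are inferred from the bindings above, and the actual changelog source is not part of this hunk:

// Shape inferred from the ChangelogModal template; illustrative only.
type ChangelogEntry = {
	title: string;
	content: string;
};

type Changelog = {
	[version: string]: {
		date: string;
		// Any key other than 'date' is rendered as a section of entries.
		[section: string]: string | Record<string, ChangelogEntry>;
	};
};

export const exampleChangelog: Changelog = {
	'0.1.0': {
		date: '2024-01-01',
		Added: {
			'0': { title: 'Example feature', content: 'Short placeholder description.' }
		}
	}
};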
+ diff --git a/src/lib/components/admin/AddUserModal.svelte b/src/lib/components/admin/AddUserModal.svelte new file mode 100644 index 0000000000000000000000000000000000000000..8538ba04a8a0e52b9d822403f9b7e92029ff1009 --- /dev/null +++ b/src/lib/components/admin/AddUserModal.svelte @@ -0,0 +1,337 @@ + + + +
+
+
{$i18n.t('Add User')}
+ +
+ +
+
+
{ + submitHandler(); + }} + > +
+ + + +
+
+ {#if tab === ''} +
+
{$i18n.t('Role')}
+ +
+ +
+
+ +
+
{$i18n.t('Name')}
+ +
+ +
+
+ +
+ +
+
{$i18n.t('Email')}
+ +
+ +
+
+ +
+
{$i18n.t('Password')}
+ +
+ +
+
+ {:else if tab === 'import'} +
+
+ + + +
+ +
+ ⓘ {$i18n.t( + 'Ensure your CSV file includes 4 columns in this order: Name, Email, Password, Role.' + )} + + {$i18n.t('Click here to download user import template file.')} + +
+
+ {/if} +
+ +
+ +
+
+
+
+
+
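The import tab above expects a CSV with four columns in the order Name, Email, Password, Role. A small parsing sketch under that assumption; the component's actual import logic is not visible in this hunk, and this naive split does not handle quoted commas:

// Illustrative CSV row parser for the user-import layout described above.
type ImportedUser = {
	name: string;
	email: string;
	password: string;
	role: string;
};

function parseUserImport(csv: string): ImportedUser[] {
	return csv
		.split('\n')
		.map((line) => line.trim())
		.filter((line) => line.length > 0)
		.map((line) => {
			// Naive split: assumes no quoted fields containing commas.
			const [name, email, password, role] = line.split(',').map((field) => field.trim());
			return { name, email, password, role };
		});
}

// Example: parseUserImport('Jane Doe,jane@example.com,changeme,user') yields one user record.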
+ + diff --git a/src/lib/components/admin/EditUserModal.svelte b/src/lib/components/admin/EditUserModal.svelte new file mode 100644 index 0000000000000000000000000000000000000000..dcaff95b662a8c307515bb9ec274d4515a8d24e7 --- /dev/null +++ b/src/lib/components/admin/EditUserModal.svelte @@ -0,0 +1,174 @@ + + + +
+
+
{$i18n.t('Edit User')}
+ +
+
+ +
+
+
{ + submitHandler(); + }} + > +
+
+ User profile +
+ +
+
{selectedUser.name}
+ +
+ {$i18n.t('Created at')} + {dayjs(selectedUser.created_at * 1000).format($i18n.t('MMMM DD, YYYY'))} +
+
+
+ +
+ +
+
+
{$i18n.t('Email')}
+ +
+ +
+
+ +
+
{$i18n.t('Name')}
+ +
+ +
+
+ +
+
{$i18n.t('New Password')}
+ +
+ +
+
+
+ +
+ +
+
+
+
+
+
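The edit form above lines up with the updateUserById helper added in src/lib/apis/users/index.ts, which already omits the password field when it is left blank. A sketch of how a submit handler might call it; the component's real submitHandler is not visible in this hunk, so the wrapper below is illustrative:

// Illustrative wiring between the edit-user form and updateUserById.
import { updateUserById } from '$lib/apis/users';

async function saveUser(
	token: string,
	userId: string,
	form: { profile_image_url: string; email: string; name: string; password: string }
) {
	try {
		// updateUserById sends `undefined` for an empty password, leaving it unchanged.
		return await updateUserById(token, userId, form);
	} catch (error) {
		console.error('Failed to update user', error);
		return null;
	}
}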
+ + diff --git a/src/lib/components/admin/Settings.svelte b/src/lib/components/admin/Settings.svelte new file mode 100644 index 0000000000000000000000000000000000000000..afb8736ea13bd60af6f2bf881b7feb96b6a48fb1 --- /dev/null +++ b/src/lib/components/admin/Settings.svelte @@ -0,0 +1,404 @@ + + +
+
+ + + + + + + + + + + + + + + + + + + + + +
+ +
+ {#if selectedTab === 'general'} + { + toast.success($i18n.t('Settings saved successfully!')); + }} + /> + {:else if selectedTab === 'users'} + { + toast.success($i18n.t('Settings saved successfully!')); + }} + /> + {:else if selectedTab === 'connections'} + { + toast.success($i18n.t('Settings saved successfully!')); + }} + /> + {:else if selectedTab === 'models'} + + {:else if selectedTab === 'documents'} + { + toast.success($i18n.t('Settings saved successfully!')); + }} + /> + {:else if selectedTab === 'web'} + { + toast.success($i18n.t('Settings saved successfully!')); + + await tick(); + await config.set(await getBackendConfig()); + }} + /> + {:else if selectedTab === 'interface'} + { + toast.success($i18n.t('Settings saved successfully!')); + }} + /> + {:else if selectedTab === 'audio'} +
+
diff --git a/src/lib/components/admin/Settings/Audio.svelte b/src/lib/components/admin/Settings/Audio.svelte new file mode 100644 index 0000000000000000000000000000000000000000..50ce7418e50d0750f0c5d9a9f219f517990abeff --- /dev/null +++ b/src/lib/components/admin/Settings/Audio.svelte @@ -0,0 +1,350 @@ + + +
{ + await updateConfigHandler(); + dispatch('save'); + }} +> +
+
+
+
{$i18n.t('STT Settings')}
+ +
+
{$i18n.t('Speech-to-Text Engine')}
+
+ +
+
+ + {#if STT_ENGINE === 'openai'} +
+
+ + + +
+
+ +
+ +
+
{$i18n.t('STT Model')}
+
+
+ + + + +
+
+
+ {/if} +
+ +
+ +
+
{$i18n.t('TTS Settings')}
+ +
+
{$i18n.t('Text-to-Speech Engine')}
+
+ +
+
+ + {#if TTS_ENGINE === 'openai'} +
+
+ + + +
+
+ {:else if TTS_ENGINE === 'elevenlabs'} +
+
+ +
+
+ {/if} + +
+ + {#if TTS_ENGINE === ''} +
+
{$i18n.t('TTS Voice')}
+
+
+ +
+
+
+ {:else if TTS_ENGINE === 'openai'} +
+
+
{$i18n.t('TTS Voice')}
+
+
+ + + + {#each voices as voice} + +
+
+
+
+
{$i18n.t('TTS Model')}
+
+
+ + + + {#each models as model} + +
+
+
+
+ {:else if TTS_ENGINE === 'elevenlabs'} +
+
+
{$i18n.t('TTS Voice')}
+
+
+ + + + {#each voices as voice} + +
+
+
+
+
{$i18n.t('TTS Model')}
+
+
+ + + + {#each models as model} + +
+
+
+
+ {/if} +
+
+
+
+ +
+
diff --git a/src/lib/components/admin/Settings/Connections.svelte b/src/lib/components/admin/Settings/Connections.svelte new file mode 100644 index 0000000000000000000000000000000000000000..fe71e4816dcf48b45dd9a01bb26b02af49735071 --- /dev/null +++ b/src/lib/components/admin/Settings/Connections.svelte @@ -0,0 +1,446 @@ + + +
{ + updateOpenAIHandler(); + updateOllamaUrlsHandler(); + + dispatch('save'); + }} +> +
+ {#if ENABLE_OPENAI_API !== null && ENABLE_OLLAMA_API !== null} +
+
+
+
{$i18n.t('OpenAI API')}
+ +
+ { + updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API); + }} + /> +
+
+ + {#if ENABLE_OPENAI_API} +
+ {#each OPENAI_API_BASE_URLS as url, idx} +
+
+ + + {#if pipelineUrls[url]} +
+ + + + + + + +
+ {/if} +
+ + +
+ {#if idx === 0} + + {:else} + + {/if} +
+ +
+ + + +
+
+
+ {$i18n.t('WebUI will make requests to')} + '{url}/models' +
+ {/each} +
+ {/if} +
+
+ +
+ +
+
+
{$i18n.t('Ollama API')}
+ +
+ { + updateOllamaConfig(localStorage.token, ENABLE_OLLAMA_API); + + if (OLLAMA_BASE_URLS.length === 0) { + OLLAMA_BASE_URLS = ['']; + } + }} + /> +
+
+ {#if ENABLE_OLLAMA_API} +
+
+ {#each OLLAMA_BASE_URLS as url, idx} +
+ + +
+ {#if idx === 0} + + {:else} + + {/if} +
+ +
+ + + +
+
+ {/each} +
+
+ +
+ {$i18n.t('Trouble accessing Ollama?')} + + {$i18n.t('Click here for help.')} + +
+ {/if} +
+ {:else} +
+
+ +
+
+ {/if} +
+ +
+ +
+
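The OpenAI rows in the Connections panel above note that the WebUI makes requests to '{url}/models' for each configured base URL. A standalone sketch of that kind of reachability check; it is not the panel's verify handler, and the function name is illustrative:

// Illustrative reachability check mirroring the "{url}/models" request mentioned above.
async function checkOpenAIConnection(baseUrl: string, apiKey: string): Promise<boolean> {
	try {
		const res = await fetch(`${baseUrl}/models`, {
			headers: {
				Accept: 'application/json',
				Authorization: `Bearer ${apiKey}`
			}
		});
		return res.ok;
	} catch (error) {
		console.error(`Failed to reach ${baseUrl}/models`, error);
		return false;
	}
}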
diff --git a/src/lib/components/admin/Settings/Database.svelte b/src/lib/components/admin/Settings/Database.svelte new file mode 100644 index 0000000000000000000000000000000000000000..0ba45263e11a407708d936207be46fb80e49a5d1 --- /dev/null +++ b/src/lib/components/admin/Settings/Database.svelte @@ -0,0 +1,146 @@ + + +
{ + saveHandler(); + }} +> +
+
+
{$i18n.t('Database')}
+ + {#if $config?.features.enable_admin_export ?? true} +
+ + + +
+ + + {/if} + +
+ +
+ + + +
+
+
+ + +
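The export control in the Database panel above pairs with the downloadDatabase helper added in src/lib/apis/utils/index.ts, which fetches the SQLite file as a blob and triggers a 'webui.db' download. A sketch of wiring it to a click handler; the panel's actual handler is not visible in this hunk:

// Illustrative click handler for the database export button.
import { downloadDatabase } from '$lib/apis/utils';

async function onExportDatabaseClick() {
	try {
		// downloadDatabase creates the object URL and starts the download itself.
		await downloadDatabase(localStorage.token);
	} catch (error) {
		console.error('Database export failed', error);
	}
}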
diff --git a/src/lib/components/admin/Settings/Documents.svelte b/src/lib/components/admin/Settings/Documents.svelte new file mode 100644 index 0000000000000000000000000000000000000000..1b0b2c3fa8803c9e5e7f7416f514eec6b01bb6c0 --- /dev/null +++ b/src/lib/components/admin/Settings/Documents.svelte @@ -0,0 +1,792 @@ + + + { + const res = await deleteAllFiles(localStorage.token).catch((error) => { + toast.error(error); + return null; + }); + + if (res) { + toast.success($i18n.t('Success')); + } + }} +/> + + { + const res = resetVectorDB(localStorage.token).catch((error) => { + toast.error(error); + return null; + }); + + if (res) { + toast.success($i18n.t('Success')); + } + }} +/> + +
{ + submitHandler(); + saveHandler(); + }} +> +
+
+
{$i18n.t('General Settings')}
+ +
+
+ {$i18n.t('Scan for documents from {{path}}', { path: 'DOCS_DIR (/data/docs)' })} +
+ + +
+ +
+
{$i18n.t('Embedding Model Engine')}
+
+ +
+
+ + {#if embeddingEngine === 'openai'} +
+ + + +
+
+
{$i18n.t('Embedding Batch Size')}
+
+ +
+
+ +
+
+ {/if} + +
+
{$i18n.t('Hybrid Search')}
+ + +
+
+ +
+ +
+
+
{$i18n.t('Embedding Model')}
+ + {#if embeddingEngine === 'ollama'} +
+
+ +
+
+ {:else} +
+
+ +
+ + {#if embeddingEngine === ''} + + {/if} +
+ {/if} + +
+ {$i18n.t( + 'Warning: If you update or change your embedding model, you will need to re-import all documents.' + )} +
+ + {#if querySettings.hybrid === true} +
+
{$i18n.t('Reranking Model')}
+ +
+
+ +
+ +
+
+ {/if} +
+ +
+ +
+
{$i18n.t('Content Extraction')}
+ +
+
{$i18n.t('Engine')}
+
+ +
+
+ + {#if showTikaServerUrl} +
+
+ +
+
+ {/if} +
+
+ +
+
{$i18n.t('Query Params')}
+ +
+
+
{$i18n.t('Top K')}
+ +
+ +
+
+ + {#if querySettings.hybrid === true} +
+
+ {$i18n.t('Minimum Score')} +
+ +
+ +
+
+ {/if} +
+ + {#if querySettings.hybrid === true} +
+ {$i18n.t( + 'Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.' + )} +
+ +
+ {/if} + +
+
{$i18n.t('RAG Template')}
+