├── .gitattributes ├── .github ├── pull_request_template.md └── workflows │ ├── claude.yml │ ├── python.yml │ ├── release.yml │ └── typescript.yml ├── .gitignore ├── .npmrc ├── .vscode └── settings.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── package-lock.json ├── package.json ├── scripts └── release.py ├── src ├── everything │ ├── CLAUDE.md │ ├── Dockerfile │ ├── README.md │ ├── everything.ts │ ├── index.ts │ ├── instructions.md │ ├── package.json │ ├── sse.ts │ ├── stdio.ts │ ├── streamableHttp.ts │ └── tsconfig.json ├── fetch │ ├── .python-version │ ├── Dockerfile │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_fetch │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ └── server.py │ └── uv.lock ├── filesystem │ ├── Dockerfile │ ├── README.md │ ├── __tests__ │ │ ├── directory-tree.test.ts │ │ ├── lib.test.ts │ │ ├── path-utils.test.ts │ │ ├── path-validation.test.ts │ │ └── roots-utils.test.ts │ ├── index.ts │ ├── jest.config.cjs │ ├── lib.ts │ ├── package.json │ ├── path-utils.ts │ ├── path-validation.ts │ ├── roots-utils.ts │ └── tsconfig.json ├── git │ ├── .gitignore │ ├── .python-version │ ├── Dockerfile │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_git │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ ├── py.typed │ │ │ └── server.py │ ├── tests │ │ └── test_server.py │ └── uv.lock ├── memory │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── sequentialthinking │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json └── time │ ├── .python-version │ ├── Dockerfile │ ├── README.md │ ├── pyproject.toml │ ├── src │ └── mcp_server_time │ │ ├── __init__.py │ │ ├── __main__.py │ │ └── server.py │ ├── test │ └── time_server_test.py │ └── uv.lock └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | package-lock.json 
linguist-generated=true 2 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Description 4 | 5 | ## Server Details 6 | 7 | - Server: 8 | - Changes to: 9 | 10 | ## Motivation and Context 11 | 12 | 13 | ## How Has This Been Tested? 14 | 15 | 16 | ## Breaking Changes 17 | 18 | 19 | ## Types of changes 20 | 21 | - [ ] Bug fix (non-breaking change which fixes an issue) 22 | - [ ] New feature (non-breaking change which adds functionality) 23 | - [ ] Breaking change (fix or feature that would cause existing functionality to change) 24 | - [ ] Documentation update 25 | 26 | ## Checklist 27 | 28 | - [ ] I have read the [MCP Protocol Documentation](https://modelcontextprotocol.io) 29 | - [ ] My changes follow MCP security best practices 30 | - [ ] I have updated the server's README accordingly 31 | - [ ] I have tested this with an LLM client 32 | - [ ] My code follows the repository's style guidelines 33 | - [ ] New and existing tests pass locally 34 | - [ ] I have added appropriate error handling 35 | - [ ] I have documented all environment variables and configuration options 36 | 37 | ## Additional context 38 | 39 | -------------------------------------------------------------------------------- /.github/workflows/claude.yml: -------------------------------------------------------------------------------- 1 | name: Claude Code 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | issues: 9 | types: [opened, assigned] 10 | pull_request_review: 11 | types: [submitted] 12 | 13 | jobs: 14 | claude: 15 | if: | 16 | (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || 17 | (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || 18 | (github.event_name == 'pull_request_review'
&& contains(github.event.review.body, '@claude')) || 19 | (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) 20 | runs-on: ubuntu-latest 21 | permissions: 22 | contents: read 23 | pull-requests: read 24 | issues: read 25 | id-token: write 26 | actions: read 27 | steps: 28 | - name: Checkout repository 29 | uses: actions/checkout@v4 30 | with: 31 | fetch-depth: 1 32 | 33 | - name: Run Claude Code 34 | id: claude 35 | uses: anthropics/claude-code-action@beta 36 | with: 37 | anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} 38 | 39 | # Allow Claude to read CI results on PRs 40 | additional_permissions: | 41 | actions: read 42 | 43 | # Trigger when assigned to an issue 44 | assignee_trigger: "claude" 45 | 46 | # Allow Claude to run bash 47 | # This should be safe given the repo is already public 48 | allowed_tools: "Bash" 49 | 50 | custom_instructions: | 51 | If posting a comment to GitHub, give a concise summary of the comment at the top and put all the details in a
block. 52 | -------------------------------------------------------------------------------- /.github/workflows/python.yml: -------------------------------------------------------------------------------- 1 | name: Python 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Find Python packages 20 | id: find-packages 21 | working-directory: src 22 | run: | 23 | PACKAGES=$(find . -name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 24 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 25 | 26 | test: 27 | needs: [detect-packages] 28 | strategy: 29 | matrix: 30 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 31 | name: Test ${{ matrix.package }} 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v4 35 | 36 | - name: Install uv 37 | uses: astral-sh/setup-uv@v3 38 | 39 | - name: Set up Python 40 | uses: actions/setup-python@v5 41 | with: 42 | python-version-file: "src/${{ matrix.package }}/.python-version" 43 | 44 | - name: Install dependencies 45 | working-directory: src/${{ matrix.package }} 46 | run: uv sync --frozen --all-extras --dev 47 | 48 | - name: Check if tests exist 49 | id: check-tests 50 | working-directory: src/${{ matrix.package }} 51 | run: | 52 | if [ -d "tests" ] || [ -d "test" ] || grep -q "pytest" pyproject.toml; then 53 | echo "has-tests=true" >> $GITHUB_OUTPUT 54 | else 55 | echo "has-tests=false" >> $GITHUB_OUTPUT 56 | fi 57 | 58 | - name: Run tests 59 | if: steps.check-tests.outputs.has-tests == 'true' 60 | working-directory: src/${{ matrix.package }} 61 | run: uv run pytest 62 | 63 | build: 64 | needs: [detect-packages, test] 65 | strategy: 66 | matrix: 67 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 68 | 
name: Build ${{ matrix.package }} 69 | runs-on: ubuntu-latest 70 | steps: 71 | - uses: actions/checkout@v4 72 | 73 | - name: Install uv 74 | uses: astral-sh/setup-uv@v3 75 | 76 | - name: Set up Python 77 | uses: actions/setup-python@v5 78 | with: 79 | python-version-file: "src/${{ matrix.package }}/.python-version" 80 | 81 | - name: Install dependencies 82 | working-directory: src/${{ matrix.package }} 83 | run: uv sync --frozen --all-extras --dev 84 | 85 | - name: Run pyright 86 | working-directory: src/${{ matrix.package }} 87 | run: uv run --frozen pyright 88 | 89 | - name: Build package 90 | working-directory: src/${{ matrix.package }} 91 | run: uv build 92 | 93 | - name: Upload artifacts 94 | uses: actions/upload-artifact@v4 95 | with: 96 | name: dist-${{ matrix.package }} 97 | path: src/${{ matrix.package }}/dist/ 98 | 99 | publish: 100 | runs-on: ubuntu-latest 101 | needs: [build, detect-packages] 102 | if: github.event_name == 'release' 103 | 104 | strategy: 105 | matrix: 106 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 107 | name: Publish ${{ matrix.package }} 108 | 109 | environment: release 110 | permissions: 111 | id-token: write # Required for trusted publishing 112 | 113 | steps: 114 | - name: Download artifacts 115 | uses: actions/download-artifact@v4 116 | with: 117 | name: dist-${{ matrix.package }} 118 | path: dist/ 119 | 120 | - name: Publish package to PyPI 121 | uses: pypa/gh-action-pypi-publish@release/v1 122 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Automatic Release Creation 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: '0 10 * * *' 7 | 8 | jobs: 9 | create-metadata: 10 | runs-on: ubuntu-latest 11 | if: github.repository_owner == 'modelcontextprotocol' 12 | outputs: 13 | hash: ${{ steps.last-release.outputs.hash }} 14 | version: ${{ 
steps.create-version.outputs.version}} 15 | npm_packages: ${{ steps.create-npm-packages.outputs.npm_packages}} 16 | pypi_packages: ${{ steps.create-pypi-packages.outputs.pypi_packages}} 17 | steps: 18 | - uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Get last release hash 23 | id: last-release 24 | run: | 25 | HASH=$(git rev-list --tags --max-count=1 || echo "HEAD~1") 26 | echo "hash=${HASH}" >> $GITHUB_OUTPUT 27 | echo "Using last release hash: ${HASH}" 28 | 29 | - name: Install uv 30 | uses: astral-sh/setup-uv@v5 31 | 32 | - name: Create version name 33 | id: create-version 34 | run: | 35 | VERSION=$(uv run --script scripts/release.py generate-version) 36 | echo "version $VERSION" 37 | echo "version=$VERSION" >> $GITHUB_OUTPUT 38 | 39 | - name: Create notes 40 | run: | 41 | HASH="${{ steps.last-release.outputs.hash }}" 42 | uv run --script scripts/release.py generate-notes --directory src/ $HASH > RELEASE_NOTES.md 43 | cat RELEASE_NOTES.md 44 | 45 | - name: Release notes 46 | uses: actions/upload-artifact@v4 47 | with: 48 | name: release-notes 49 | path: RELEASE_NOTES.md 50 | 51 | - name: Create python matrix 52 | id: create-pypi-packages 53 | run: | 54 | HASH="${{ steps.last-release.outputs.hash }}" 55 | PYPI=$(uv run --script scripts/release.py generate-matrix --pypi --directory src $HASH) 56 | echo "pypi_packages $PYPI" 57 | echo "pypi_packages=$PYPI" >> $GITHUB_OUTPUT 58 | 59 | - name: Create npm matrix 60 | id: create-npm-packages 61 | run: | 62 | HASH="${{ steps.last-release.outputs.hash }}" 63 | NPM=$(uv run --script scripts/release.py generate-matrix --npm --directory src $HASH) 64 | echo "npm_packages $NPM" 65 | echo "npm_packages=$NPM" >> $GITHUB_OUTPUT 66 | 67 | update-packages: 68 | needs: [create-metadata] 69 | if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }} 70 | runs-on: ubuntu-latest 71 | environment: release 72 | outputs: 73 | changes_made: ${{ 
steps.commit.outputs.changes_made }} 74 | steps: 75 | - uses: actions/checkout@v4 76 | with: 77 | fetch-depth: 0 78 | 79 | - name: Install uv 80 | uses: astral-sh/setup-uv@v5 81 | 82 | - name: Update packages 83 | run: | 84 | HASH="${{ needs.create-metadata.outputs.hash }}" 85 | uv run --script scripts/release.py update-packages --directory src/ $HASH 86 | 87 | - name: Configure git 88 | run: | 89 | git config --global user.name "GitHub Actions" 90 | git config --global user.email "actions@github.com" 91 | 92 | - name: Commit changes 93 | id: commit 94 | run: | 95 | VERSION="${{ needs.create-metadata.outputs.version }}" 96 | git add -u 97 | if git diff-index --quiet HEAD; then 98 | echo "changes_made=false" >> $GITHUB_OUTPUT 99 | else 100 | git commit -m 'Automatic update of packages' 101 | git tag -a "$VERSION" -m "Release $VERSION" 102 | git push origin "$VERSION" 103 | echo "changes_made=true" >> $GITHUB_OUTPUT 104 | fi 105 | 106 | publish-pypi: 107 | needs: [update-packages, create-metadata] 108 | if: ${{ needs.create-metadata.outputs.pypi_packages != '[]' && needs.create-metadata.outputs.pypi_packages != '' }} 109 | strategy: 110 | fail-fast: false 111 | matrix: 112 | package: ${{ fromJson(needs.create-metadata.outputs.pypi_packages) }} 113 | name: Build ${{ matrix.package }} 114 | environment: release 115 | permissions: 116 | id-token: write # Required for trusted publishing 117 | runs-on: ubuntu-latest 118 | steps: 119 | - uses: actions/checkout@v4 120 | with: 121 | ref: ${{ needs.create-metadata.outputs.version }} 122 | 123 | - name: Install uv 124 | uses: astral-sh/setup-uv@v5 125 | 126 | - name: Set up Python 127 | uses: actions/setup-python@v5 128 | with: 129 | python-version-file: "src/${{ matrix.package }}/.python-version" 130 | 131 | - name: Install dependencies 132 | working-directory: src/${{ matrix.package }} 133 | run: uv sync --frozen --all-extras --dev 134 | 135 | - name: Run pyright 136 | working-directory: src/${{ matrix.package }} 137 | run: 
uv run --frozen pyright 138 | 139 | - name: Build package 140 | working-directory: src/${{ matrix.package }} 141 | run: uv build 142 | 143 | - name: Publish package to PyPI 144 | uses: pypa/gh-action-pypi-publish@release/v1 145 | with: 146 | packages-dir: src/${{ matrix.package }}/dist 147 | 148 | publish-npm: 149 | needs: [update-packages, create-metadata] 150 | if: ${{ needs.create-metadata.outputs.npm_packages != '[]' && needs.create-metadata.outputs.npm_packages != '' }} 151 | strategy: 152 | fail-fast: false 153 | matrix: 154 | package: ${{ fromJson(needs.create-metadata.outputs.npm_packages) }} 155 | name: Build ${{ matrix.package }} 156 | environment: release 157 | runs-on: ubuntu-latest 158 | steps: 159 | - uses: actions/checkout@v4 160 | with: 161 | ref: ${{ needs.create-metadata.outputs.version }} 162 | 163 | - uses: actions/setup-node@v4 164 | with: 165 | node-version: 22 166 | cache: npm 167 | registry-url: 'https://registry.npmjs.org' 168 | 169 | - name: Install dependencies 170 | working-directory: src/${{ matrix.package }} 171 | run: npm ci 172 | 173 | - name: Check if version exists on npm 174 | working-directory: src/${{ matrix.package }} 175 | run: | 176 | VERSION=$(jq -r .version package.json) 177 | if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then 178 | echo "Version $VERSION already exists on npm" 179 | exit 1 180 | fi 181 | echo "Version $VERSION is new, proceeding with publish" 182 | 183 | - name: Build package 184 | working-directory: src/${{ matrix.package }} 185 | run: npm run build 186 | 187 | - name: Publish package 188 | working-directory: src/${{ matrix.package }} 189 | run: | 190 | npm publish --access public 191 | env: 192 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 193 | 194 | create-release: 195 | needs: [update-packages, create-metadata, publish-pypi, publish-npm] 196 | if: needs.update-packages.outputs.changes_made == 'true' 197 | runs-on: ubuntu-latest 198 | environment: release 
199 | permissions: 200 | contents: write 201 | steps: 202 | - uses: actions/checkout@v4 203 | 204 | - name: Download release notes 205 | uses: actions/download-artifact@v4 206 | with: 207 | name: release-notes 208 | 209 | - name: Create release 210 | env: 211 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN}} 212 | run: | 213 | VERSION="${{ needs.create-metadata.outputs.version }}" 214 | gh release create "$VERSION" \ 215 | --title "Release $VERSION" \ 216 | --notes-file RELEASE_NOTES.md 217 | 218 | -------------------------------------------------------------------------------- /.github/workflows/typescript.yml: -------------------------------------------------------------------------------- 1 | name: TypeScript 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | - name: Find JS packages 19 | id: find-packages 20 | working-directory: src 21 | run: | 22 | PACKAGES=$(find . 
-name package.json -not -path "*/node_modules/*" -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 23 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 24 | 25 | test: 26 | needs: [detect-packages] 27 | strategy: 28 | matrix: 29 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 30 | name: Test ${{ matrix.package }} 31 | runs-on: ubuntu-latest 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - uses: actions/setup-node@v4 36 | with: 37 | node-version: 22 38 | cache: npm 39 | 40 | - name: Install dependencies 41 | working-directory: src/${{ matrix.package }} 42 | run: npm ci 43 | 44 | - name: Check if tests exist 45 | id: check-tests 46 | working-directory: src/${{ matrix.package }} 47 | run: | 48 | if npm run test --silent 2>/dev/null; then 49 | echo "has-tests=true" >> $GITHUB_OUTPUT 50 | else 51 | echo "has-tests=false" >> $GITHUB_OUTPUT 52 | fi 53 | continue-on-error: true 54 | 55 | - name: Run tests 56 | if: steps.check-tests.outputs.has-tests == 'true' 57 | working-directory: src/${{ matrix.package }} 58 | run: npm test 59 | 60 | build: 61 | needs: [detect-packages, test] 62 | strategy: 63 | matrix: 64 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 65 | name: Build ${{ matrix.package }} 66 | runs-on: ubuntu-latest 67 | steps: 68 | - uses: actions/checkout@v4 69 | 70 | - uses: actions/setup-node@v4 71 | with: 72 | node-version: 22 73 | cache: npm 74 | 75 | - name: Install dependencies 76 | working-directory: src/${{ matrix.package }} 77 | run: npm ci 78 | 79 | - name: Build package 80 | working-directory: src/${{ matrix.package }} 81 | run: npm run build 82 | 83 | publish: 84 | runs-on: ubuntu-latest 85 | needs: [build, detect-packages] 86 | if: github.event_name == 'release' 87 | environment: release 88 | 89 | strategy: 90 | matrix: 91 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 92 | name: Publish ${{ matrix.package }} 93 | 94 | permissions: 95 | contents: read 96 | id-token: 
write 97 | 98 | steps: 99 | - uses: actions/checkout@v4 100 | - uses: actions/setup-node@v4 101 | with: 102 | node-version: 22 103 | cache: npm 104 | registry-url: "https://registry.npmjs.org" 105 | 106 | - name: Install dependencies 107 | working-directory: src/${{ matrix.package }} 108 | run: npm ci 109 | 110 | - name: Publish package 111 | working-directory: src/${{ matrix.package }} 112 | run: npm publish --access public 113 | env: 114 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 115 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 
64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # Jetbrains IDEs 126 | .idea/ 127 | 128 | # yarn v2 129 | .yarn/cache 130 | .yarn/unplugged 131 | .yarn/build-state.yml 132 | .yarn/install-state.gz 133 | .pnp.* 134 | 135 | build/ 136 | 137 | gcp-oauth.keys.json 138 | .*-server-credentials.json 139 | 140 | # Byte-compiled / optimized / DLL files 141 | __pycache__/ 142 | *.py[cod] 143 | *$py.class 144 | 145 | # C extensions 146 | *.so 147 | 148 | # Distribution / packaging 149 | .Python 150 | build/ 151 | develop-eggs/ 152 | dist/ 153 | downloads/ 154 | eggs/ 155 | .eggs/ 156 | lib/ 157 | lib64/ 158 | parts/ 159 | sdist/ 160 | var/ 161 | wheels/ 162 | share/python-wheels/ 163 | *.egg-info/ 164 | .installed.cfg 165 | *.egg 166 | MANIFEST 167 | 168 | # PyInstaller 169 | # Usually these files are written by 
a python script from a template 170 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 171 | *.manifest 172 | *.spec 173 | 174 | # Installer logs 175 | pip-log.txt 176 | pip-delete-this-directory.txt 177 | 178 | # Unit test / coverage reports 179 | htmlcov/ 180 | .tox/ 181 | .nox/ 182 | .coverage 183 | .coverage.* 184 | .cache 185 | nosetests.xml 186 | coverage.xml 187 | *.cover 188 | *.py,cover 189 | .hypothesis/ 190 | .pytest_cache/ 191 | cover/ 192 | 193 | # Translations 194 | *.mo 195 | *.pot 196 | 197 | # Django stuff: 198 | *.log 199 | local_settings.py 200 | db.sqlite3 201 | db.sqlite3-journal 202 | 203 | # Flask stuff: 204 | instance/ 205 | .webassets-cache 206 | 207 | # Scrapy stuff: 208 | .scrapy 209 | 210 | # Sphinx documentation 211 | docs/_build/ 212 | 213 | # PyBuilder 214 | .pybuilder/ 215 | target/ 216 | 217 | # Jupyter Notebook 218 | .ipynb_checkpoints 219 | 220 | # IPython 221 | profile_default/ 222 | ipython_config.py 223 | 224 | # pyenv 225 | # For a library or package, you might want to ignore these files since the code is 226 | # intended to run in multiple environments; otherwise, check them in: 227 | # .python-version 228 | 229 | # pipenv 230 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 231 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 232 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 233 | # install all needed dependencies. 234 | #Pipfile.lock 235 | 236 | # poetry 237 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 238 | # This is especially recommended for binary packages to ensure reproducibility, and is more 239 | # commonly ignored for libraries. 
240 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 241 | #poetry.lock 242 | 243 | # pdm 244 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 245 | #pdm.lock 246 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 247 | # in version control. 248 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 249 | .pdm.toml 250 | .pdm-python 251 | .pdm-build/ 252 | 253 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 254 | __pypackages__/ 255 | 256 | # Celery stuff 257 | celerybeat-schedule 258 | celerybeat.pid 259 | 260 | # SageMath parsed files 261 | *.sage.py 262 | 263 | # Environments 264 | .env 265 | .venv 266 | env/ 267 | venv/ 268 | ENV/ 269 | env.bak/ 270 | venv.bak/ 271 | 272 | # Spyder project settings 273 | .spyderproject 274 | .spyproject 275 | 276 | # Rope project settings 277 | .ropeproject 278 | 279 | # mkdocs documentation 280 | /site 281 | 282 | # mypy 283 | .mypy_cache/ 284 | .dmypy.json 285 | dmypy.json 286 | 287 | # Pyre type checker 288 | .pyre/ 289 | 290 | # pytype static type analyzer 291 | .pytype/ 292 | 293 | # Cython debug symbols 294 | cython_debug/ 295 | 296 | .DS_Store 297 | 298 | # PyCharm 299 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 300 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 301 | # and can be added to the global gitignore or merged into this file. For a more nuclear 302 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
303 | #.idea/ 304 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | registry="https://registry.npmjs.org/" 2 | @modelcontextprotocol:registry="https://registry.npmjs.org/" 3 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | mcp-coc@anthropic.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to MCP Servers 2 | 3 | Thanks for your interest in contributing! Here's how you can help make this repo better. 4 | 5 | We accept changes through [the standard GitHub flow model](https://docs.github.com/en/get-started/using-github/github-flow). 6 | 7 | ## Server Listings 8 | 9 | We welcome PRs that add links to your servers in the [README.md](./README.md)! 
10 | 11 | ## Server Implementations 12 | 13 | We welcome: 14 | - **Bug fixes** — Help us squash those pesky bugs. 15 | - **Usability improvements** — Making servers easier to use for humans and agents. 16 | - **Enhancements that demonstrate MCP protocol features** — We encourage contributions that help reference servers better illustrate underutilized aspects of the MCP protocol beyond just Tools, such as Resources, Prompts, or Roots. For example, adding Roots support to filesystem-server helps showcase this important but lesser-known feature. 17 | 18 | We're more selective about: 19 | - **Other new features** — Especially if they're not crucial to the server's core purpose or are highly opinionated. The existing servers are reference servers meant to inspire the community. If you need specific features, we encourage you to build enhanced versions! We think a diverse ecosystem of servers is beneficial for everyone, and would love to link to your improved server in our README. 20 | 21 | We don't accept: 22 | - **New server implementations** — We encourage you to publish them yourself, and link to them from the README. 23 | 24 | ## Documentation 25 | 26 | Improvements to existing documentation is welcome - although generally we'd prefer ergonomic improvements than documenting pain points if possible! 27 | 28 | We're more selective about adding wholly new documentation, especially in ways that aren't vendor neutral (e.g. how to run a particular server with a particular client). 29 | 30 | ## Community 31 | 32 | [Learn how the MCP community communicates](https://modelcontextprotocol.io/community/communication). 33 | 34 | Thank you for helping make MCP servers better for everyone! 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Anthropic, PBC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | Thank you for helping us keep our MCP servers secure. 3 | 4 | The **reference servers** in this repo are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project. 5 | 6 | The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities. 
7 | 8 | ## Vulnerability Disclosure Program 9 | 10 | Our Vulnerability Program guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). We ask that any validated vulnerability in this functionality be reported through the [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability). 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/servers", 3 | "private": true, 4 | "version": "0.6.2", 5 | "description": "Model Context Protocol servers", 6 | "license": "MIT", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "type": "module", 11 | "workspaces": [ 12 | "src/*" 13 | ], 14 | "files": [], 15 | "scripts": { 16 | "build": "npm run build --workspaces", 17 | "watch": "npm run watch --workspaces", 18 | "publish-all": "npm publish --workspaces --access public", 19 | "link-all": "npm link --workspaces" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/server-everything": "*", 23 | "@modelcontextprotocol/server-memory": "*", 24 | "@modelcontextprotocol/server-filesystem": "*", 25 | "@modelcontextprotocol/server-sequential-thinking": "*" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /scripts/release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env uv run --script 2 | # /// script 3 | # requires-python = ">=3.12" 4 | # dependencies = [ 5 | # "click>=8.1.8", 6 | # "tomlkit>=0.13.2" 7 | # ] 8 | # /// 9 | import sys 10 | import re 11 | import click 12 | from pathlib import Path 13 | import json 14 | import tomlkit 15 | import datetime 16 | import subprocess 17 | from dataclasses import 
dataclass 18 | from typing import Any, Iterator, NewType, Protocol 19 | 20 | 21 | Version = NewType("Version", str) 22 | GitHash = NewType("GitHash", str) 23 | 24 | 25 | class GitHashParamType(click.ParamType): 26 | name = "git_hash" 27 | 28 | def convert( 29 | self, value: Any, param: click.Parameter | None, ctx: click.Context | None 30 | ) -> GitHash | None: 31 | if value is None: 32 | return None 33 | 34 | if not (8 <= len(value) <= 40): 35 | self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}") 36 | 37 | if not re.match(r"^[0-9a-fA-F]+$", value): 38 | self.fail("Git hash must contain only hex digits (0-9, a-f)") 39 | 40 | try: 41 | # Verify hash exists in repo 42 | subprocess.run( 43 | ["git", "rev-parse", "--verify", value], check=True, capture_output=True 44 | ) 45 | except subprocess.CalledProcessError: 46 | self.fail(f"Git hash {value} not found in repository") 47 | 48 | return GitHash(value.lower()) 49 | 50 | 51 | GIT_HASH = GitHashParamType() 52 | 53 | 54 | class Package(Protocol): 55 | path: Path 56 | 57 | def package_name(self) -> str: ... 58 | 59 | def update_version(self, version: Version) -> None: ... 
60 | 61 | 62 | @dataclass 63 | class NpmPackage: 64 | path: Path 65 | 66 | def package_name(self) -> str: 67 | with open(self.path / "package.json", "r") as f: 68 | return json.load(f)["name"] 69 | 70 | def update_version(self, version: Version): 71 | with open(self.path / "package.json", "r+") as f: 72 | data = json.load(f) 73 | data["version"] = version 74 | f.seek(0) 75 | json.dump(data, f, indent=2) 76 | f.truncate() 77 | 78 | 79 | @dataclass 80 | class PyPiPackage: 81 | path: Path 82 | 83 | def package_name(self) -> str: 84 | with open(self.path / "pyproject.toml") as f: 85 | toml_data = tomlkit.parse(f.read()) 86 | name = toml_data.get("project", {}).get("name") 87 | if not name: 88 | raise Exception("No name in pyproject.toml project section") 89 | return str(name) 90 | 91 | def update_version(self, version: Version): 92 | # Update version in pyproject.toml 93 | with open(self.path / "pyproject.toml") as f: 94 | data = tomlkit.parse(f.read()) 95 | data["project"]["version"] = version 96 | 97 | with open(self.path / "pyproject.toml", "w") as f: 98 | f.write(tomlkit.dumps(data)) 99 | 100 | 101 | def has_changes(path: Path, git_hash: GitHash) -> bool: 102 | """Check if any files changed between current state and git hash""" 103 | try: 104 | output = subprocess.run( 105 | ["git", "diff", "--name-only", git_hash, "--", "."], 106 | cwd=path, 107 | check=True, 108 | capture_output=True, 109 | text=True, 110 | ) 111 | 112 | changed_files = [Path(f) for f in output.stdout.splitlines()] 113 | relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]] 114 | return len(relevant_files) >= 1 115 | except subprocess.CalledProcessError: 116 | return False 117 | 118 | 119 | def gen_version() -> Version: 120 | """Generate version based on current date""" 121 | now = datetime.datetime.now() 122 | return Version(f"{now.year}.{now.month}.{now.day}") 123 | 124 | 125 | def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]: 126 | for path 
in directory.glob("*/package.json"): 127 | if has_changes(path.parent, git_hash): 128 | yield NpmPackage(path.parent) 129 | for path in directory.glob("*/pyproject.toml"): 130 | if has_changes(path.parent, git_hash): 131 | yield PyPiPackage(path.parent) 132 | 133 | 134 | @click.group() 135 | def cli(): 136 | pass 137 | 138 | 139 | @cli.command("update-packages") 140 | @click.option( 141 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 142 | ) 143 | @click.argument("git_hash", type=GIT_HASH) 144 | def update_packages(directory: Path, git_hash: GitHash) -> int: 145 | # Detect package type 146 | path = directory.resolve(strict=True) 147 | version = gen_version() 148 | 149 | for package in find_changed_packages(path, git_hash): 150 | name = package.package_name() 151 | package.update_version(version) 152 | 153 | click.echo(f"{name}@{version}") 154 | 155 | return 0 156 | 157 | 158 | @cli.command("generate-notes") 159 | @click.option( 160 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 161 | ) 162 | @click.argument("git_hash", type=GIT_HASH) 163 | def generate_notes(directory: Path, git_hash: GitHash) -> int: 164 | # Detect package type 165 | path = directory.resolve(strict=True) 166 | version = gen_version() 167 | 168 | click.echo(f"# Release : v{version}") 169 | click.echo("") 170 | click.echo("## Updated packages") 171 | for package in find_changed_packages(path, git_hash): 172 | name = package.package_name() 173 | click.echo(f"- {name}@{version}") 174 | 175 | return 0 176 | 177 | 178 | @cli.command("generate-version") 179 | def generate_version() -> int: 180 | # Detect package type 181 | click.echo(gen_version()) 182 | return 0 183 | 184 | 185 | @cli.command("generate-matrix") 186 | @click.option( 187 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 188 | ) 189 | @click.option("--npm", is_flag=True, default=False) 190 | @click.option("--pypi", is_flag=True, 
default=False) 191 | @click.argument("git_hash", type=GIT_HASH) 192 | def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int: 193 | # Detect package type 194 | path = directory.resolve(strict=True) 195 | version = gen_version() 196 | 197 | changes = [] 198 | for package in find_changed_packages(path, git_hash): 199 | pkg = package.path.relative_to(path) 200 | if npm and isinstance(package, NpmPackage): 201 | changes.append(str(pkg)) 202 | if pypi and isinstance(package, PyPiPackage): 203 | changes.append(str(pkg)) 204 | 205 | click.echo(json.dumps(changes)) 206 | return 0 207 | 208 | 209 | if __name__ == "__main__": 210 | sys.exit(cli()) 211 | -------------------------------------------------------------------------------- /src/everything/CLAUDE.md: -------------------------------------------------------------------------------- 1 | # MCP "Everything" Server - Development Guidelines 2 | 3 | ## Build, Test & Run Commands 4 | - Build: `npm run build` - Compiles TypeScript to JavaScript 5 | - Watch mode: `npm run watch` - Watches for changes and rebuilds automatically 6 | - Run server: `npm run start` - Starts the MCP server using stdio transport 7 | - Run SSE server: `npm run start:sse` - Starts the MCP server with SSE transport 8 | - Prepare release: `npm run prepare` - Builds the project for publishing 9 | 10 | ## Code Style Guidelines 11 | - Use ES modules with `.js` extension in import paths 12 | - Strictly type all functions and variables with TypeScript 13 | - Follow zod schema patterns for tool input validation 14 | - Prefer async/await over callbacks and Promise chains 15 | - Place all imports at top of file, grouped by external then internal 16 | - Use descriptive variable names that clearly indicate purpose 17 | - Implement proper cleanup for timers and resources in server shutdown 18 | - Follow camelCase for variables/functions, PascalCase for types/classes, UPPER_CASE for constants 19 | - Handle errors with try/catch blocks 
and provide clear error messages 20 | - Use consistent indentation (2 spaces) and trailing commas in multi-line objects -------------------------------------------------------------------------------- /src/everything/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/everything /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | FROM node:22-alpine AS release 11 | 12 | WORKDIR /app 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | RUN npm ci --ignore-scripts --omit-dev 21 | 22 | CMD ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/everything/README.md: -------------------------------------------------------------------------------- 1 | # Everything MCP Server 2 | 3 | This MCP server attempts to exercise all the features of the MCP protocol. It is not intended to be a useful server, but rather a test server for builders of MCP clients. It implements prompts, tools, resources, sampling, and more to showcase MCP capabilities. 4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | 1. `echo` 10 | - Simple tool to echo back input messages 11 | - Input: 12 | - `message` (string): Message to echo back 13 | - Returns: Text content with echoed message 14 | 15 | 2. `add` 16 | - Adds two numbers together 17 | - Inputs: 18 | - `a` (number): First number 19 | - `b` (number): Second number 20 | - Returns: Text result of the addition 21 | 22 | 3. 
`longRunningOperation` 23 | - Demonstrates progress notifications for long operations 24 | - Inputs: 25 | - `duration` (number, default: 10): Duration in seconds 26 | - `steps` (number, default: 5): Number of progress steps 27 | - Returns: Completion message with duration and steps 28 | - Sends progress notifications during execution 29 | 30 | 4. `printEnv` 31 | - Prints all environment variables 32 | - Useful for debugging MCP server configuration 33 | - No inputs required 34 | - Returns: JSON string of all environment variables 35 | 36 | 5. `sampleLLM` 37 | - Demonstrates LLM sampling capability using MCP sampling feature 38 | - Inputs: 39 | - `prompt` (string): The prompt to send to the LLM 40 | - `maxTokens` (number, default: 100): Maximum tokens to generate 41 | - Returns: Generated LLM response 42 | 43 | 6. `getTinyImage` 44 | - Returns a small test image 45 | - No inputs required 46 | - Returns: Base64 encoded PNG image data 47 | 48 | 7. `annotatedMessage` 49 | - Demonstrates how annotations can be used to provide metadata about content 50 | - Inputs: 51 | - `messageType` (enum: "error" | "success" | "debug"): Type of message to demonstrate different annotation patterns 52 | - `includeImage` (boolean, default: false): Whether to include an example image 53 | - Returns: Content with varying annotations: 54 | - Error messages: High priority (1.0), visible to both user and assistant 55 | - Success messages: Medium priority (0.7), user-focused 56 | - Debug messages: Low priority (0.3), assistant-focused 57 | - Optional image: Medium priority (0.5), user-focused 58 | - Example annotations: 59 | ```json 60 | { 61 | "priority": 1.0, 62 | "audience": ["user", "assistant"] 63 | } 64 | ``` 65 | 66 | 8. 
`getResourceReference` 67 | - Returns a resource reference that can be used by MCP clients 68 | - Inputs: 69 | - `resourceId` (number, 1-100): ID of the resource to reference 70 | - Returns: A resource reference with: 71 | - Text introduction 72 | - Embedded resource with `type: "resource"` 73 | - Text instruction for using the resource URI 74 | 75 | 9. `startElicitation` 76 | - Initiates an elicitation (interaction) within the MCP client. 77 | - Inputs: 78 | - `color` (string): Favorite color 79 | - `number` (number, 1-100): Favorite number 80 | - `pets` (enum): Favorite pet 81 | - Returns: Confirmation of the elicitation demo with selection summary. 82 | 83 | 10. `structuredContent` 84 | - Demonstrates a tool returning structured content using the example in the specification 85 | - Provides an output schema to allow testing of client SHOULD advisory to validate the result using the schema 86 | - Inputs: 87 | - `location` (string): A location or ZIP code, mock data is returned regardless of value 88 | - Returns: a response with 89 | - `structuredContent` field conformant to the output schema 90 | - A backward compatible Text Content field, a SHOULD advisory in the specification 91 | 92 | 11. 
`listRoots` 93 | - Lists the current MCP roots provided by the client 94 | - Demonstrates the roots protocol capability even though this server doesn't access files 95 | - No inputs required 96 | - Returns: List of current roots with their URIs and names, or a message if no roots are set 97 | - Shows how servers can interact with the MCP roots protocol 98 | 99 | ### Resources 100 | 101 | The server provides 100 test resources in two formats: 102 | - Even numbered resources: 103 | - Plaintext format 104 | - URI pattern: `test://static/resource/{even_number}` 105 | - Content: Simple text description 106 | 107 | - Odd numbered resources: 108 | - Binary blob format 109 | - URI pattern: `test://static/resource/{odd_number}` 110 | - Content: Base64 encoded binary data 111 | 112 | Resource features: 113 | - Supports pagination (10 items per page) 114 | - Allows subscribing to resource updates 115 | - Demonstrates resource templates 116 | - Auto-updates subscribed resources every 5 seconds 117 | 118 | ### Prompts 119 | 120 | 1. `simple_prompt` 121 | - Basic prompt without arguments 122 | - Returns: Single message exchange 123 | 124 | 2. `complex_prompt` 125 | - Advanced prompt demonstrating argument handling 126 | - Required arguments: 127 | - `temperature` (string): Temperature setting 128 | - Optional arguments: 129 | - `style` (string): Output style preference 130 | - Returns: Multi-turn conversation with images 131 | 132 | 3. 
`resource_prompt` 133 | - Demonstrates embedding resource references in prompts 134 | - Required arguments: 135 | - `resourceId` (number): ID of the resource to embed (1-100) 136 | - Returns: Multi-turn conversation with an embedded resource reference 137 | - Shows how to include resources directly in prompt messages 138 | 139 | ### Roots 140 | 141 | The server demonstrates the MCP roots protocol capability: 142 | 143 | - Declares `roots: { listChanged: true }` capability to indicate support for roots 144 | - Handles `roots/list_changed` notifications from clients 145 | - Requests initial roots during server initialization 146 | - Provides a `listRoots` tool to display current roots 147 | - Logs roots-related events for demonstration purposes 148 | 149 | Note: This server doesn't actually access files, but demonstrates how servers can interact with the roots protocol for clients that need to understand which directories are available for file operations. 150 | 151 | ### Logging 152 | 153 | The server sends random-leveled log messages every 15 seconds, e.g.: 154 | 155 | ```json 156 | { 157 | "method": "notifications/message", 158 | "params": { 159 | "level": "info", 160 | "data": "Info-level message" 161 | } 162 | } 163 | ``` 164 | 165 | ## Usage with Claude Desktop (uses [stdio Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#stdio)) 166 | 167 | Add to your `claude_desktop_config.json`: 168 | 169 | ```json 170 | { 171 | "mcpServers": { 172 | "everything": { 173 | "command": "npx", 174 | "args": [ 175 | "-y", 176 | "@modelcontextprotocol/server-everything" 177 | ] 178 | } 179 | } 180 | } 181 | ``` 182 | 183 | ## Usage with VS Code 184 | 185 | For quick installation, use of of the one-click install buttons below... 
186 | 187 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D&quality=insiders) 188 | 189 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D&quality=insiders) 190 | 191 | For manual installation, you can configure the MCP server using one of these methods: 192 | 193 | **Method 1: User Configuration (Recommended)** 194 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 195 | 196 | **Method 2: Workspace Configuration** 197 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. 
This will allow you to share the configuration with others. 198 | 199 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/mcp). 200 | 201 | #### NPX 202 | 203 | ```json 204 | { 205 | "servers": { 206 | "everything": { 207 | "command": "npx", 208 | "args": ["-y", "@modelcontextprotocol/server-everything"] 209 | } 210 | } 211 | } 212 | ``` 213 | 214 | ## Running from source with [HTTP+SSE Transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) (deprecated as of [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports)) 215 | 216 | ```shell 217 | cd src/everything 218 | npm install 219 | npm run start:sse 220 | ``` 221 | 222 | ## Run from source with [Streamable HTTP Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http) 223 | 224 | ```shell 225 | cd src/everything 226 | npm install 227 | npm run start:streamableHttp 228 | ``` 229 | 230 | ## Running as an installed package 231 | ### Install 232 | ```shell 233 | npm install -g @modelcontextprotocol/server-everything@latest 234 | ```` 235 | 236 | ### Run the default (stdio) server 237 | ```shell 238 | npx @modelcontextprotocol/server-everything 239 | ``` 240 | 241 | ### Or specify stdio explicitly 242 | ```shell 243 | npx @modelcontextprotocol/server-everything stdio 244 | ``` 245 | 246 | ### Run the SSE server 247 | ```shell 248 | npx @modelcontextprotocol/server-everything sse 249 | ``` 250 | 251 | ### Run the streamable HTTP server 252 | ```shell 253 | npx @modelcontextprotocol/server-everything streamableHttp 254 | ``` 255 | 256 | -------------------------------------------------------------------------------- /src/everything/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Parse command line arguments first 4 | const args = 
process.argv.slice(2); 5 | const scriptName = args[0] || 'stdio'; 6 | 7 | async function run() { 8 | try { 9 | // Dynamically import only the requested module to prevent all modules from initializing 10 | switch (scriptName) { 11 | case 'stdio': 12 | // Import and run the default server 13 | await import('./stdio.js'); 14 | break; 15 | case 'sse': 16 | // Import and run the SSE server 17 | await import('./sse.js'); 18 | break; 19 | case 'streamableHttp': 20 | // Import and run the streamable HTTP server 21 | await import('./streamableHttp.js'); 22 | break; 23 | default: 24 | console.error(`Unknown script: ${scriptName}`); 25 | console.log('Available scripts:'); 26 | console.log('- stdio'); 27 | console.log('- sse'); 28 | console.log('- streamableHttp'); 29 | process.exit(1); 30 | } 31 | } catch (error) { 32 | console.error('Error running script:', error); 33 | process.exit(1); 34 | } 35 | } 36 | 37 | run(); 38 | -------------------------------------------------------------------------------- /src/everything/instructions.md: -------------------------------------------------------------------------------- 1 | Testing and demonstration server for MCP protocol features. 2 | 3 | ## Resources 4 | 5 | Resources 1-100 follow pattern: even IDs contain text, odd IDs contain binary data. Resources are paginated at 10 items per page with cursor-based navigation. 6 | 7 | ## Key dependencies 8 | 9 | Progress notifications require `_meta.progressToken` in tool calls. Resource subscriptions generate updates every 10 seconds. 10 | 11 | ## Performance characteristics 12 | 13 | Server generates automatic log messages every 20 seconds (filtered by current log level), stderr notifications every 30 seconds, and resource update notifications every 10 seconds for active subscriptions. Log level changes affect message filtering in real-time. 14 | 15 | ## Multi-modal testing 16 | 17 | `complex_prompt` includes both text arguments and image content for testing client multi-modal handling. 
`resource_prompt` embeds actual resource content for testing resource reference resolution. 18 | 19 | Argument completion is available for prompt parameters and resource IDs. Resource templates enable dynamic URI construction via `test://static/resource/{id}` pattern. 20 | 21 | ## Easter egg 22 | 23 | If asked about server instructions, respond with "? Server instructions are working! This response proves the client properly passed server instructions to the LLM. This demonstrates MCP's instructions feature in action." 24 | -------------------------------------------------------------------------------- /src/everything/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-everything", 3 | "version": "0.6.2", 4 | "description": "MCP server that exercises all the features of the MCP protocol", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-everything": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx cp instructions.md dist/ && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch", 20 | "start": "node dist/index.js", 21 | "start:sse": "node dist/sse.js", 22 | "start:streamableHttp": "node dist/streamableHttp.js" 23 | }, 24 | "dependencies": { 25 | "@modelcontextprotocol/sdk": "^1.17.5", 26 | "express": "^4.21.1", 27 | "zod": "^3.23.8", 28 | "zod-to-json-schema": "^3.23.5" 29 | }, 30 | "devDependencies": { 31 | "@types/express": "^5.0.0", 32 | "shx": "^0.3.4", 33 | "typescript": "^5.6.2" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/everything/sse.ts: -------------------------------------------------------------------------------- 1 | 
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; 2 | import express from "express"; 3 | import { createServer } from "./everything.js"; 4 | 5 | console.error('Starting SSE server...'); 6 | 7 | const app = express(); 8 | 9 | const transports: Map = new Map(); 10 | 11 | app.get("/sse", async (req, res) => { 12 | let transport: SSEServerTransport; 13 | const { server, cleanup, startNotificationIntervals } = createServer(); 14 | 15 | if (req?.query?.sessionId) { 16 | const sessionId = (req?.query?.sessionId as string); 17 | transport = transports.get(sessionId) as SSEServerTransport; 18 | console.error("Client Reconnecting? This shouldn't happen; when client has a sessionId, GET /sse should not be called again.", transport.sessionId); 19 | } else { 20 | // Create and store transport for new session 21 | transport = new SSEServerTransport("/message", res); 22 | transports.set(transport.sessionId, transport); 23 | 24 | // Connect server to transport 25 | await server.connect(transport); 26 | console.error("Client Connected: ", transport.sessionId); 27 | 28 | // Start notification intervals after client connects 29 | startNotificationIntervals(transport.sessionId); 30 | 31 | // Handle close of connection 32 | server.onclose = async () => { 33 | console.error("Client Disconnected: ", transport.sessionId); 34 | transports.delete(transport.sessionId); 35 | await cleanup(); 36 | }; 37 | 38 | } 39 | 40 | }); 41 | 42 | app.post("/message", async (req, res) => { 43 | const sessionId = (req?.query?.sessionId as string); 44 | const transport = transports.get(sessionId); 45 | if (transport) { 46 | console.error("Client Message from", sessionId); 47 | await transport.handlePostMessage(req, res); 48 | } else { 49 | console.error(`No transport found for sessionId ${sessionId}`) 50 | } 51 | }); 52 | 53 | const PORT = process.env.PORT || 3001; 54 | app.listen(PORT, () => { 55 | console.error(`Server is running on port ${PORT}`); 56 | }); 57 | 
-------------------------------------------------------------------------------- /src/everything/stdio.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 4 | import { createServer } from "./everything.js"; 5 | import { 6 | LoggingLevel, 7 | LoggingLevelSchema, 8 | LoggingMessageNotification, 9 | SetLevelRequestSchema 10 | } from "@modelcontextprotocol/sdk/types.js"; 11 | 12 | console.error('Starting default (STDIO) server...'); 13 | 14 | async function main() { 15 | const transport = new StdioServerTransport(); 16 | const {server, cleanup, startNotificationIntervals } = createServer(); 17 | 18 | // Currently, for STDIO servers, automatic log-level support is not available, as levels are tracked by sessionId. 19 | // The listener will be set, so if the STDIO server advertises support for logging, and the client sends a setLevel 20 | // request, it will be handled and thus not throw a "Method not found" error. However, the STDIO server will need to 21 | // implement its own listener and level handling for now. This will be remediated in a future SDK version. 22 | 23 | let logLevel: LoggingLevel = "debug"; 24 | server.setRequestHandler(SetLevelRequestSchema, async (request) => { 25 | const { level } = request.params; 26 | logLevel = level; 27 | return {}; 28 | }); 29 | 30 | server.sendLoggingMessage = async (params: LoggingMessageNotification["params"], _: string|undefined): Promise => { 31 | const LOG_LEVEL_SEVERITY = new Map( 32 | LoggingLevelSchema.options.map((level, index) => [level, index]) 33 | ); 34 | 35 | const isMessageIgnored = (level: LoggingLevel): boolean => { 36 | const currentLevel = logLevel; 37 | return (currentLevel) 38 | ? LOG_LEVEL_SEVERITY.get(level)! < LOG_LEVEL_SEVERITY.get(currentLevel)! 
39 | : false; 40 | }; 41 | 42 | if (!isMessageIgnored(params.level)) { 43 | return server.notification({method: "notifications/message", params}) 44 | } 45 | 46 | } 47 | 48 | await server.connect(transport); 49 | startNotificationIntervals(); 50 | 51 | // Cleanup on exit 52 | process.on("SIGINT", async () => { 53 | await cleanup(); 54 | await server.close(); 55 | process.exit(0); 56 | }); 57 | } 58 | 59 | main().catch((error) => { 60 | console.error("Server error:", error); 61 | process.exit(1); 62 | }); 63 | 64 | -------------------------------------------------------------------------------- /src/everything/streamableHttp.ts: -------------------------------------------------------------------------------- 1 | import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js"; 2 | import { InMemoryEventStore } from '@modelcontextprotocol/sdk/examples/shared/inMemoryEventStore.js'; 3 | import express, { Request, Response } from "express"; 4 | import { createServer } from "./everything.js"; 5 | import { randomUUID } from 'node:crypto'; 6 | 7 | console.error('Starting Streamable HTTP server...'); 8 | 9 | const app = express(); 10 | 11 | const transports: Map = new Map(); 12 | 13 | app.post('/mcp', async (req: Request, res: Response) => { 14 | console.error('Received MCP POST request'); 15 | try { 16 | // Check for existing session ID 17 | const sessionId = req.headers['mcp-session-id'] as string | undefined; 18 | let transport: StreamableHTTPServerTransport; 19 | 20 | if (sessionId && transports.has(sessionId)) { 21 | // Reuse existing transport 22 | transport = transports.get(sessionId)!; 23 | } else if (!sessionId) { 24 | 25 | const { server, cleanup, startNotificationIntervals } = createServer(); 26 | 27 | // New initialization request 28 | const eventStore = new InMemoryEventStore(); 29 | transport = new StreamableHTTPServerTransport({ 30 | sessionIdGenerator: () => randomUUID(), 31 | eventStore, // Enable resumability 32 | 
onsessioninitialized: (sessionId: string) => { 33 | // Store the transport by session ID when session is initialized 34 | // This avoids race conditions where requests might come in before the session is stored 35 | console.error(`Session initialized with ID: ${sessionId}`); 36 | transports.set(sessionId, transport); 37 | } 38 | }); 39 | 40 | 41 | // Set up onclose handler to clean up transport when closed 42 | server.onclose = async () => { 43 | const sid = transport.sessionId; 44 | if (sid && transports.has(sid)) { 45 | console.error(`Transport closed for session ${sid}, removing from transports map`); 46 | transports.delete(sid); 47 | await cleanup(); 48 | } 49 | }; 50 | 51 | // Connect the transport to the MCP server BEFORE handling the request 52 | // so responses can flow back through the same transport 53 | await server.connect(transport); 54 | 55 | await transport.handleRequest(req, res); 56 | 57 | // Wait until initialize is complete and transport will have a sessionId 58 | startNotificationIntervals(transport.sessionId); 59 | 60 | return; // Already handled 61 | } else { 62 | // Invalid request - no session ID or not initialization request 63 | res.status(400).json({ 64 | jsonrpc: '2.0', 65 | error: { 66 | code: -32000, 67 | message: 'Bad Request: No valid session ID provided', 68 | }, 69 | id: req?.body?.id, 70 | }); 71 | return; 72 | } 73 | 74 | // Handle the request with existing transport - no need to reconnect 75 | // The existing transport is already connected to the server 76 | await transport.handleRequest(req, res); 77 | } catch (error) { 78 | console.error('Error handling MCP request:', error); 79 | if (!res.headersSent) { 80 | res.status(500).json({ 81 | jsonrpc: '2.0', 82 | error: { 83 | code: -32603, 84 | message: 'Internal server error', 85 | }, 86 | id: req?.body?.id, 87 | }); 88 | return; 89 | } 90 | } 91 | }); 92 | 93 | // Handle GET requests for SSE streams (using built-in support from StreamableHTTP) 94 | app.get('/mcp', async (req: 
Request, res: Response) => { 95 | console.error('Received MCP GET request'); 96 | const sessionId = req.headers['mcp-session-id'] as string | undefined; 97 | if (!sessionId || !transports.has(sessionId)) { 98 | res.status(400).json({ 99 | jsonrpc: '2.0', 100 | error: { 101 | code: -32000, 102 | message: 'Bad Request: No valid session ID provided', 103 | }, 104 | id: req?.body?.id, 105 | }); 106 | return; 107 | } 108 | 109 | // Check for Last-Event-ID header for resumability 110 | const lastEventId = req.headers['last-event-id'] as string | undefined; 111 | if (lastEventId) { 112 | console.error(`Client reconnecting with Last-Event-ID: ${lastEventId}`); 113 | } else { 114 | console.error(`Establishing new SSE stream for session ${sessionId}`); 115 | } 116 | 117 | const transport = transports.get(sessionId); 118 | await transport!.handleRequest(req, res); 119 | }); 120 | 121 | // Handle DELETE requests for session termination (according to MCP spec) 122 | app.delete('/mcp', async (req: Request, res: Response) => { 123 | const sessionId = req.headers['mcp-session-id'] as string | undefined; 124 | if (!sessionId || !transports.has(sessionId)) { 125 | res.status(400).json({ 126 | jsonrpc: '2.0', 127 | error: { 128 | code: -32000, 129 | message: 'Bad Request: No valid session ID provided', 130 | }, 131 | id: req?.body?.id, 132 | }); 133 | return; 134 | } 135 | 136 | console.error(`Received session termination request for session ${sessionId}`); 137 | 138 | try { 139 | const transport = transports.get(sessionId); 140 | await transport!.handleRequest(req, res); 141 | } catch (error) { 142 | console.error('Error handling session termination:', error); 143 | if (!res.headersSent) { 144 | res.status(500).json({ 145 | jsonrpc: '2.0', 146 | error: { 147 | code: -32603, 148 | message: 'Error handling session termination', 149 | }, 150 | id: req?.body?.id, 151 | }); 152 | return; 153 | } 154 | } 155 | }); 156 | 157 | // Start the server 158 | const PORT = process.env.PORT || 
3001; 159 | app.listen(PORT, () => { 160 | console.error(`MCP Streamable HTTP Server listening on port ${PORT}`); 161 | }); 162 | 163 | // Handle server shutdown 164 | process.on('SIGINT', async () => { 165 | console.error('Shutting down server...'); 166 | 167 | // Close all active transports to properly clean up resources 168 | for (const sessionId in transports) { 169 | try { 170 | console.error(`Closing transport for session ${sessionId}`); 171 | await transports.get(sessionId)!.close(); 172 | transports.delete(sessionId); 173 | } catch (error) { 174 | console.error(`Error closing transport for session ${sessionId}:`, error); 175 | } 176 | } 177 | 178 | console.error('Server shutdown complete'); 179 | process.exit(0); 180 | }); 181 | -------------------------------------------------------------------------------- /src/everything/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/fetch/.python-version: -------------------------------------------------------------------------------- 1 | 3.11 2 | -------------------------------------------------------------------------------- /src/fetch/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-fetch"] 37 | -------------------------------------------------------------------------------- /src/fetch/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | -------------------------------------------------------------------------------- /src/fetch/README.md: -------------------------------------------------------------------------------- 1 | # Fetch MCP Server 2 | 3 | A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption. 4 | 5 | > [!CAUTION] 6 | > This server can access local/internal IP addresses and may represent a security risk. Exercise caution when using this MCP server to ensure this does not expose any sensitive data. 7 | 8 | The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need. 9 | 10 | ### Available Tools 11 | 12 | - `fetch` - Fetches a URL from the internet and extracts its contents as markdown. 13 | - `url` (string, required): URL to fetch 14 | - `max_length` (integer, optional): Maximum number of characters to return (default: 5000) 15 | - `start_index` (integer, optional): Start content from this character index (default: 0) 16 | - `raw` (boolean, optional): Get raw content without markdown conversion (default: false) 17 | 18 | ### Prompts 19 | 20 | - **fetch** 21 | - Fetch a URL and extract its contents as markdown 22 | - Arguments: 23 | - `url` (string, required): URL to fetch 24 | 25 | ## Installation 26 | 27 | Optionally: Install node.js, this will cause the fetch server to use a different HTML simplifier that is more robust. 28 | 29 | ### Using uv (recommended) 30 | 31 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 32 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-fetch*. 
33 | 34 | ### Using PIP 35 | 36 | Alternatively you can install `mcp-server-fetch` via pip: 37 | 38 | ``` 39 | pip install mcp-server-fetch 40 | ``` 41 | 42 | After installation, you can run it as a script using: 43 | 44 | ``` 45 | python -m mcp_server_fetch 46 | ``` 47 | 48 | ## Configuration 49 | 50 | ### Configure for Claude.app 51 | 52 | Add to your Claude settings: 53 | 54 |
55 | Using uvx 56 | 57 | ```json 58 | { 59 | "mcpServers": { 60 | "fetch": { 61 | "command": "uvx", 62 | "args": ["mcp-server-fetch"] 63 | } 64 | } 65 | } 66 | ``` 67 |
68 | 69 |
70 | Using docker 71 | 72 | ```json 73 | { 74 | "mcpServers": { 75 | "fetch": { 76 | "command": "docker", 77 | "args": ["run", "-i", "--rm", "mcp/fetch"] 78 | } 79 | } 80 | } 81 | ``` 82 |
83 | 84 |
85 | Using pip installation 86 | 87 | ```json 88 | { 89 | "mcpServers": { 90 | "fetch": { 91 | "command": "python", 92 | "args": ["-m", "mcp_server_fetch"] 93 | } 94 | } 95 | } 96 | ``` 97 |
98 | 99 | ### Configure for VS Code 100 | 101 | For quick installation, use one of the one-click install buttons below... 102 | 103 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D&quality=insiders) 104 | 105 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D&quality=insiders) 106 | 107 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 108 | 109 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 110 | 111 | > Note that the `mcp` key is needed when using the `mcp.json` file. 112 | 113 |
114 | Using uvx 115 | 116 | ```json 117 | { 118 | "mcp": { 119 | "servers": { 120 | "fetch": { 121 | "command": "uvx", 122 | "args": ["mcp-server-fetch"] 123 | } 124 | } 125 | } 126 | } 127 | ``` 128 |
129 | 130 |
131 | Using Docker 132 | 133 | ```json 134 | { 135 | "mcp": { 136 | "servers": { 137 | "fetch": { 138 | "command": "docker", 139 | "args": ["run", "-i", "--rm", "mcp/fetch"] 140 | } 141 | } 142 | } 143 | } 144 | ``` 145 |
146 | 147 | ### Customization - robots.txt 148 | 149 | By default, the server will obey a websites robots.txt file if the request came from the model (via a tool), but not if 150 | the request was user initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the 151 | `args` list in the configuration. 152 | 153 | ### Customization - User-agent 154 | 155 | By default, depending on if the request came from the model (via a tool), or was user initiated (via a prompt), the 156 | server will use either the user-agent 157 | ``` 158 | ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers) 159 | ``` 160 | or 161 | ``` 162 | ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers) 163 | ``` 164 | 165 | This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration. 166 | 167 | ### Customization - Proxy 168 | 169 | The server can be configured to use a proxy by using the `--proxy-url` argument. 170 | 171 | ## Debugging 172 | 173 | You can use the MCP inspector to debug the server. For uvx installations: 174 | 175 | ``` 176 | npx @modelcontextprotocol/inspector uvx mcp-server-fetch 177 | ``` 178 | 179 | Or if you've installed the package in a specific directory or are developing on it: 180 | 181 | ``` 182 | cd path/to/servers/src/fetch 183 | npx @modelcontextprotocol/inspector uv run mcp-server-fetch 184 | ``` 185 | 186 | ## Contributing 187 | 188 | We encourage contributions to help expand and improve mcp-server-fetch. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable. 189 | 190 | For examples of other MCP servers and implementation patterns, see: 191 | https://github.com/modelcontextprotocol/servers 192 | 193 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-fetch even more powerful and useful. 
194 | 195 | ## License 196 | 197 | mcp-server-fetch is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 198 | -------------------------------------------------------------------------------- /src/fetch/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-fetch" 3 | version = "0.6.3" 4 | description = "A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." }] 8 | maintainers = [{ name = "Jack Adamson", email = "jadamson@anthropic.com" }] 9 | keywords = ["http", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "httpx<0.28", 20 | "markdownify>=0.13.1", 21 | "mcp>=1.1.3", 22 | "protego>=0.3.1", 23 | "pydantic>=2.0.0", 24 | "readabilipy>=0.2.0", 25 | "requests>=2.32.3", 26 | ] 27 | 28 | [project.scripts] 29 | mcp-server-fetch = "mcp_server_fetch:main" 30 | 31 | [build-system] 32 | requires = ["hatchling"] 33 | build-backend = "hatchling.build" 34 | 35 | [tool.uv] 36 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"] 37 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Fetch Server - HTTP fetching functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | 
parser = argparse.ArgumentParser( 10 | description="give a model the ability to make web requests" 11 | ) 12 | parser.add_argument("--user-agent", type=str, help="Custom User-Agent string") 13 | parser.add_argument( 14 | "--ignore-robots-txt", 15 | action="store_true", 16 | help="Ignore robots.txt restrictions", 17 | ) 18 | parser.add_argument("--proxy-url", type=str, help="Proxy URL to use for requests") 19 | 20 | args = parser.parse_args() 21 | asyncio.run(serve(args.user_agent, args.ignore_robots_txt, args.proxy_url)) 22 | 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | from mcp_server_fetch import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/server.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Tuple 2 | from urllib.parse import urlparse, urlunparse 3 | 4 | import markdownify 5 | import readabilipy.simple_json 6 | from mcp.shared.exceptions import McpError 7 | from mcp.server import Server 8 | from mcp.server.stdio import stdio_server 9 | from mcp.types import ( 10 | ErrorData, 11 | GetPromptResult, 12 | Prompt, 13 | PromptArgument, 14 | PromptMessage, 15 | TextContent, 16 | Tool, 17 | INVALID_PARAMS, 18 | INTERNAL_ERROR, 19 | ) 20 | from protego import Protego 21 | from pydantic import BaseModel, Field, AnyUrl 22 | 23 | DEFAULT_USER_AGENT_AUTONOMOUS = "ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)" 24 | DEFAULT_USER_AGENT_MANUAL = "ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)" 25 | 26 | 27 | def extract_content_from_html(html: str) -> str: 28 | """Extract and convert HTML 
content to Markdown format. 29 | 30 | Args: 31 | html: Raw HTML content to process 32 | 33 | Returns: 34 | Simplified markdown version of the content 35 | """ 36 | ret = readabilipy.simple_json.simple_json_from_html_string( 37 | html, use_readability=True 38 | ) 39 | if not ret["content"]: 40 | return "Page failed to be simplified from HTML" 41 | content = markdownify.markdownify( 42 | ret["content"], 43 | heading_style=markdownify.ATX, 44 | ) 45 | return content 46 | 47 | 48 | def get_robots_txt_url(url: str) -> str: 49 | """Get the robots.txt URL for a given website URL. 50 | 51 | Args: 52 | url: Website URL to get robots.txt for 53 | 54 | Returns: 55 | URL of the robots.txt file 56 | """ 57 | # Parse the URL into components 58 | parsed = urlparse(url) 59 | 60 | # Reconstruct the base URL with just scheme, netloc, and /robots.txt path 61 | robots_url = urlunparse((parsed.scheme, parsed.netloc, "/robots.txt", "", "", "")) 62 | 63 | return robots_url 64 | 65 | 66 | async def check_may_autonomously_fetch_url(url: str, user_agent: str, proxy_url: str | None = None) -> None: 67 | """ 68 | Check if the URL can be fetched by the user agent according to the robots.txt file. 69 | Raises a McpError if not. 
70 | """ 71 | from httpx import AsyncClient, HTTPError 72 | 73 | robot_txt_url = get_robots_txt_url(url) 74 | 75 | async with AsyncClient(proxies=proxy_url) as client: 76 | try: 77 | response = await client.get( 78 | robot_txt_url, 79 | follow_redirects=True, 80 | headers={"User-Agent": user_agent}, 81 | ) 82 | except HTTPError: 83 | raise McpError(ErrorData( 84 | code=INTERNAL_ERROR, 85 | message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue", 86 | )) 87 | if response.status_code in (401, 403): 88 | raise McpError(ErrorData( 89 | code=INTERNAL_ERROR, 90 | message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt", 91 | )) 92 | elif 400 <= response.status_code < 500: 93 | return 94 | robot_txt = response.text 95 | processed_robot_txt = "\n".join( 96 | line for line in robot_txt.splitlines() if not line.strip().startswith("#") 97 | ) 98 | robot_parser = Protego.parse(processed_robot_txt) 99 | if not robot_parser.can_fetch(str(url), user_agent): 100 | raise McpError(ErrorData( 101 | code=INTERNAL_ERROR, 102 | message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, " 103 | f"{user_agent}\n" 104 | f"{url}" 105 | f"\n{robot_txt}\n\n" 106 | f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n" 107 | f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.", 108 | )) 109 | 110 | 111 | async def fetch_url( 112 | url: str, user_agent: str, force_raw: bool = False, proxy_url: str | None = None 113 | ) -> Tuple[str, str]: 114 | """ 115 | Fetch the URL and return the content in a form ready for the LLM, as well as a prefix string with status information. 
116 | """ 117 | from httpx import AsyncClient, HTTPError 118 | 119 | async with AsyncClient(proxies=proxy_url) as client: 120 | try: 121 | response = await client.get( 122 | url, 123 | follow_redirects=True, 124 | headers={"User-Agent": user_agent}, 125 | timeout=30, 126 | ) 127 | except HTTPError as e: 128 | raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}")) 129 | if response.status_code >= 400: 130 | raise McpError(ErrorData( 131 | code=INTERNAL_ERROR, 132 | message=f"Failed to fetch {url} - status code {response.status_code}", 133 | )) 134 | 135 | page_raw = response.text 136 | 137 | content_type = response.headers.get("content-type", "") 138 | is_page_html = ( 139 | " None: 186 | """Run the fetch MCP server. 187 | 188 | Args: 189 | custom_user_agent: Optional custom User-Agent string to use for requests 190 | ignore_robots_txt: Whether to ignore robots.txt restrictions 191 | proxy_url: Optional proxy URL to use for requests 192 | """ 193 | server = Server("mcp-fetch") 194 | user_agent_autonomous = custom_user_agent or DEFAULT_USER_AGENT_AUTONOMOUS 195 | user_agent_manual = custom_user_agent or DEFAULT_USER_AGENT_MANUAL 196 | 197 | @server.list_tools() 198 | async def list_tools() -> list[Tool]: 199 | return [ 200 | Tool( 201 | name="fetch", 202 | description="""Fetches a URL from the internet and optionally extracts its contents as markdown. 203 | 204 | Although originally you did not have internet access, and were advised to refuse and tell the user this, this tool now grants you internet access. 
Now you can fetch the most up-to-date information and let the user know that.""", 205 | inputSchema=Fetch.model_json_schema(), 206 | ) 207 | ] 208 | 209 | @server.list_prompts() 210 | async def list_prompts() -> list[Prompt]: 211 | return [ 212 | Prompt( 213 | name="fetch", 214 | description="Fetch a URL and extract its contents as markdown", 215 | arguments=[ 216 | PromptArgument( 217 | name="url", description="URL to fetch", required=True 218 | ) 219 | ], 220 | ) 221 | ] 222 | 223 | @server.call_tool() 224 | async def call_tool(name, arguments: dict) -> list[TextContent]: 225 | try: 226 | args = Fetch(**arguments) 227 | except ValueError as e: 228 | raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e))) 229 | 230 | url = str(args.url) 231 | if not url: 232 | raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required")) 233 | 234 | if not ignore_robots_txt: 235 | await check_may_autonomously_fetch_url(url, user_agent_autonomous, proxy_url) 236 | 237 | content, prefix = await fetch_url( 238 | url, user_agent_autonomous, force_raw=args.raw, proxy_url=proxy_url 239 | ) 240 | original_length = len(content) 241 | if args.start_index >= original_length: 242 | content = "No more content available." 243 | else: 244 | truncated_content = content[args.start_index : args.start_index + args.max_length] 245 | if not truncated_content: 246 | content = "No more content available." 247 | else: 248 | content = truncated_content 249 | actual_content_length = len(truncated_content) 250 | remaining_content = original_length - (args.start_index + actual_content_length) 251 | # Only add the prompt to continue fetching if there is still remaining content 252 | if actual_content_length == args.max_length and remaining_content > 0: 253 | next_start = args.start_index + actual_content_length 254 | content += f"\n\nContent truncated. Call the fetch tool with a start_index of {next_start} to get more content." 
255 | return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")] 256 | 257 | @server.get_prompt() 258 | async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult: 259 | if not arguments or "url" not in arguments: 260 | raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required")) 261 | 262 | url = arguments["url"] 263 | 264 | try: 265 | content, prefix = await fetch_url(url, user_agent_manual, proxy_url=proxy_url) 266 | # TODO: after SDK bug is addressed, don't catch the exception 267 | except McpError as e: 268 | return GetPromptResult( 269 | description=f"Failed to fetch {url}", 270 | messages=[ 271 | PromptMessage( 272 | role="user", 273 | content=TextContent(type="text", text=str(e)), 274 | ) 275 | ], 276 | ) 277 | return GetPromptResult( 278 | description=f"Contents of {url}", 279 | messages=[ 280 | PromptMessage( 281 | role="user", content=TextContent(type="text", text=prefix + content) 282 | ) 283 | ], 284 | ) 285 | 286 | options = server.create_initialization_options() 287 | async with stdio_server() as (read_stream, write_stream): 288 | await server.run(read_stream, write_stream, options, raise_exceptions=True) 289 | -------------------------------------------------------------------------------- /src/filesystem/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | WORKDIR /app 4 | 5 | COPY src/filesystem /app 6 | COPY tsconfig.json /tsconfig.json 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | 13 | FROM node:22-alpine AS release 14 | 15 | WORKDIR /app 16 | 17 | COPY --from=builder /app/dist /app/dist 18 | COPY --from=builder /app/package.json /app/package.json 19 | COPY --from=builder /app/package-lock.json /app/package-lock.json 20 | 21 | ENV NODE_ENV=production 22 | 23 | RUN npm ci --ignore-scripts 
--omit=dev

ENTRYPOINT ["node", "/app/dist/index.js"]
--------------------------------------------------------------------------------
/src/filesystem/README.md:
--------------------------------------------------------------------------------
# Filesystem MCP Server

Node.js server implementing Model Context Protocol (MCP) for filesystem operations.

## Features

- Read/write files
- Create/list/delete directories
- Move files/directories
- Search files
- Get file metadata
- Dynamic directory access control via [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots)

## Directory Access Control

The server uses a flexible directory access control system. Directories can be specified via command-line arguments or dynamically via [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots).

### Method 1: Command-line Arguments
Specify Allowed directories when starting the server:
```bash
mcp-server-filesystem /path/to/dir1 /path/to/dir2
```

### Method 2: MCP Roots (Recommended)
MCP clients that support [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots) can dynamically update the Allowed directories.

Roots notified by the Client to the Server completely replace any server-side Allowed directories when provided.

**Important**: If the server starts without command-line arguments AND the client doesn't support the roots protocol (or provides empty roots), the server will throw an error during initialization.

This is the recommended method, as it enables runtime directory updates via `roots/list_changed` notifications without a server restart, providing a more flexible and modern integration experience.

### How It Works

The server's directory access control follows this flow:

1.
**Server Startup** 38 | - Server starts with directories from command-line arguments (if provided) 39 | - If no arguments provided, server starts with empty allowed directories 40 | 41 | 2. **Client Connection & Initialization** 42 | - Client connects and sends `initialize` request with capabilities 43 | - Server checks if client supports roots protocol (`capabilities.roots`) 44 | 45 | 3. **Roots Protocol Handling** (if client supports roots) 46 | - **On initialization**: Server requests roots from client via `roots/list` 47 | - Client responds with its configured roots 48 | - Server replaces ALL allowed directories with client's roots 49 | - **On runtime updates**: Client can send `notifications/roots/list_changed` 50 | - Server requests updated roots and replaces allowed directories again 51 | 52 | 4. **Fallback Behavior** (if client doesn't support roots) 53 | - Server continues using command-line directories only 54 | - No dynamic updates possible 55 | 56 | 5. **Access Control** 57 | - All filesystem operations are restricted to allowed directories 58 | - Use `list_allowed_directories` tool to see current directories 59 | - Server requires at least ONE allowed directory to operate 60 | 61 | **Note**: The server will only allow operations within directories specified either via `args` or via Roots. 
62 | 63 | 64 | 65 | ## API 66 | 67 | ### Tools 68 | 69 | - **read_text_file** 70 | - Read complete contents of a file as text 71 | - Inputs: 72 | - `path` (string) 73 | - `head` (number, optional): First N lines 74 | - `tail` (number, optional): Last N lines 75 | - Always treats the file as UTF-8 text regardless of extension 76 | - Cannot specify both `head` and `tail` simultaneously 77 | 78 | - **read_media_file** 79 | - Read an image or audio file 80 | - Inputs: 81 | - `path` (string) 82 | - Streams the file and returns base64 data with the corresponding MIME type 83 | 84 | - **read_multiple_files** 85 | - Read multiple files simultaneously 86 | - Input: `paths` (string[]) 87 | - Failed reads won't stop the entire operation 88 | 89 | - **write_file** 90 | - Create new file or overwrite existing (exercise caution with this) 91 | - Inputs: 92 | - `path` (string): File location 93 | - `content` (string): File content 94 | 95 | - **edit_file** 96 | - Make selective edits using advanced pattern matching and formatting 97 | - Features: 98 | - Line-based and multi-line content matching 99 | - Whitespace normalization with indentation preservation 100 | - Multiple simultaneous edits with correct positioning 101 | - Indentation style detection and preservation 102 | - Git-style diff output with context 103 | - Preview changes with dry run mode 104 | - Inputs: 105 | - `path` (string): File to edit 106 | - `edits` (array): List of edit operations 107 | - `oldText` (string): Text to search for (can be substring) 108 | - `newText` (string): Text to replace with 109 | - `dryRun` (boolean): Preview changes without applying (default: false) 110 | - Returns detailed diff and match information for dry runs, otherwise applies changes 111 | - Best Practice: Always use dryRun first to preview changes before applying them 112 | 113 | - **create_directory** 114 | - Create new directory or ensure it exists 115 | - Input: `path` (string) 116 | - Creates parent directories if needed 117 | 
- Succeeds silently if directory exists 118 | 119 | - **list_directory** 120 | - List directory contents with [FILE] or [DIR] prefixes 121 | - Input: `path` (string) 122 | 123 | - **list_directory_with_sizes** 124 | - List directory contents with [FILE] or [DIR] prefixes, including file sizes 125 | - Inputs: 126 | - `path` (string): Directory path to list 127 | - `sortBy` (string, optional): Sort entries by "name" or "size" (default: "name") 128 | - Returns detailed listing with file sizes and summary statistics 129 | - Shows total files, directories, and combined size 130 | 131 | - **directory_tree** 132 | - Get a recursive tree view of files and directories as a JSON structure 133 | - Input: `path` (string): Starting directory path 134 | - Returns JSON structure with: 135 | - `name`: File/directory name 136 | - `type`: "file" or "directory" 137 | - `children`: Array of child entries (for directories only) 138 | - Output is formatted with 2-space indentation for readability 139 | 140 | - **move_file** 141 | - Move or rename files and directories 142 | - Inputs: 143 | - `source` (string) 144 | - `destination` (string) 145 | - Fails if destination exists 146 | 147 | - **search_files** 148 | - Recursively search for files/directories that match or do not match patterns 149 | - Inputs: 150 | - `path` (string): Starting directory 151 | - `pattern` (string): Search pattern 152 | - `excludePatterns` (string[]): Exclude any patterns. 153 | - Glob-style pattern matching 154 | - Returns full paths to matches 155 | 156 | - **directory_tree** 157 | - Get recursive JSON tree structure of directory contents 158 | - Inputs: 159 | - `path` (string): Starting directory 160 | - `excludePatterns` (string[]): Exclude any patterns. Glob formats are supported. 
161 | - Returns: 162 | - JSON array where each entry contains: 163 | - `name` (string): File/directory name 164 | - `type` ('file'|'directory'): Entry type 165 | - `children` (array): Present only for directories 166 | - Empty array for empty directories 167 | - Omitted for files 168 | 169 | - **get_file_info** 170 | - Get detailed file/directory metadata 171 | - Input: `path` (string) 172 | - Returns: 173 | - Size 174 | - Creation time 175 | - Modified time 176 | - Access time 177 | - Type (file/directory) 178 | - Permissions 179 | 180 | - **list_allowed_directories** 181 | - List all directories the server is allowed to access 182 | - No input required 183 | - Returns: 184 | - Directories that this server can read/write from 185 | 186 | ## Usage with Claude Desktop 187 | Add this to your `claude_desktop_config.json`: 188 | 189 | Note: you can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server. 190 | 191 | ### Docker 192 | Note: all directories must be mounted to `/projects` by default. 
193 | 194 | ```json 195 | { 196 | "mcpServers": { 197 | "filesystem": { 198 | "command": "docker", 199 | "args": [ 200 | "run", 201 | "-i", 202 | "--rm", 203 | "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop", 204 | "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro", 205 | "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt", 206 | "mcp/filesystem", 207 | "/projects" 208 | ] 209 | } 210 | } 211 | } 212 | ``` 213 | 214 | ### NPX 215 | 216 | ```json 217 | { 218 | "mcpServers": { 219 | "filesystem": { 220 | "command": "npx", 221 | "args": [ 222 | "-y", 223 | "@modelcontextprotocol/server-filesystem", 224 | "/Users/username/Desktop", 225 | "/path/to/other/allowed/dir" 226 | ] 227 | } 228 | } 229 | } 230 | ``` 231 | 232 | ## Usage with VS Code 233 | 234 | For quick installation, click the installation buttons below... 235 | 236 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D&quality=insiders) 237 | 238 | [![Install with Docker in VS 
Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D&quality=insiders) 239 | 240 | For manual installation, you can configure the MCP server using one of these methods: 241 | 242 | **Method 1: User Configuration (Recommended)** 243 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 244 | 245 | **Method 2: Workspace Configuration** 246 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 247 | 248 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/mcp). 249 | 250 | You can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server. 251 | 252 | ### Docker 253 | Note: all directories must be mounted to `/projects` by default. 
254 | 255 | ```json 256 | { 257 | "servers": { 258 | "filesystem": { 259 | "command": "docker", 260 | "args": [ 261 | "run", 262 | "-i", 263 | "--rm", 264 | "--mount", "type=bind,src=${workspaceFolder},dst=/projects/workspace", 265 | "mcp/filesystem", 266 | "/projects" 267 | ] 268 | } 269 | } 270 | } 271 | ``` 272 | 273 | ### NPX 274 | 275 | ```json 276 | { 277 | "servers": { 278 | "filesystem": { 279 | "command": "npx", 280 | "args": [ 281 | "-y", 282 | "@modelcontextprotocol/server-filesystem", 283 | "${workspaceFolder}" 284 | ] 285 | } 286 | } 287 | } 288 | ``` 289 | 290 | ## Build 291 | 292 | Docker build: 293 | 294 | ```bash 295 | docker build -t mcp/filesystem -f src/filesystem/Dockerfile . 296 | ``` 297 | 298 | ## License 299 | 300 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 301 | -------------------------------------------------------------------------------- /src/filesystem/__tests__/directory-tree.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; 2 | import * as fs from 'fs/promises'; 3 | import * as path from 'path'; 4 | import * as os from 'os'; 5 | 6 | // We need to test the buildTree function, but it's defined inside the request handler 7 | // So we'll extract the core logic into a testable function 8 | import { minimatch } from 'minimatch'; 9 | 10 | interface TreeEntry { 11 | name: string; 12 | type: 'file' | 'directory'; 13 | children?: TreeEntry[]; 14 | } 15 | 16 | async function buildTreeForTesting(currentPath: string, rootPath: string, excludePatterns: string[] = []): Promise { 17 | const entries = await fs.readdir(currentPath, {withFileTypes: true}); 18 | const result: TreeEntry[] = []; 19 | 20 | for (const entry of 
entries) {
    const relativePath = path.relative(rootPath, path.join(currentPath, entry.name));
    const shouldExclude = excludePatterns.some(pattern => {
      if (pattern.includes('*')) {
        return minimatch(relativePath, pattern, {dot: true});
      }
      // For files: match exact name or as part of path
      // For directories: match as directory path
      return minimatch(relativePath, pattern, {dot: true}) ||
             minimatch(relativePath, `**/${pattern}`, {dot: true}) ||
             minimatch(relativePath, `**/${pattern}/**`, {dot: true});
    });
    if (shouldExclude)
      continue;

    const entryData: TreeEntry = {
      name: entry.name,
      type: entry.isDirectory() ? 'directory' : 'file'
    };

    if (entry.isDirectory()) {
      const subPath = path.join(currentPath, entry.name);
      entryData.children = await buildTreeForTesting(subPath, rootPath, excludePatterns);
    }

    result.push(entryData);
  }

  return result;
}

describe('buildTree exclude patterns', () => {
  let testDir: string;

  // Fresh fixture tree for every test: src/, node_modules/, .git/,
  // nested/node_modules/, plus dot-files and regular files at the root.
  beforeEach(async () => {
    testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'filesystem-test-'));

    // Create test directory structure
    await fs.mkdir(path.join(testDir, 'src'));
    await fs.mkdir(path.join(testDir, 'node_modules'));
    await fs.mkdir(path.join(testDir, '.git'));
    await fs.mkdir(path.join(testDir, 'nested', 'node_modules'), { recursive: true });

    // Create test files
    await fs.writeFile(path.join(testDir, '.env'), 'SECRET=value');
    await fs.writeFile(path.join(testDir, '.env.local'), 'LOCAL_SECRET=value');
    await fs.writeFile(path.join(testDir, 'src', 'index.js'), 'console.log("hello");');
    await fs.writeFile(path.join(testDir, 'package.json'), '{}');
    await fs.writeFile(path.join(testDir, 'node_modules', 'module.js'), 'module.exports = {};');
    await fs.writeFile(path.join(testDir, 'nested', 'node_modules', 'deep.js'), 'module.exports = {};');
  });

  afterEach(async () => {
    await fs.rm(testDir, { recursive: true, force: true });
  });

  // A bare pattern must match only the exact name, not names it is a prefix of.
  it('should exclude files matching simple patterns', async () => {
    // Test the current implementation - this will fail until the bug is fixed
    const tree = await buildTreeForTesting(testDir, testDir, ['.env']);
    const fileNames = tree.map(entry => entry.name);

    expect(fileNames).not.toContain('.env');
    expect(fileNames).toContain('.env.local'); // Should not exclude this
    expect(fileNames).toContain('src');
    expect(fileNames).toContain('package.json');
  });

  it('should exclude directories matching simple patterns', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']);
    const dirNames = tree.map(entry => entry.name);

    expect(dirNames).not.toContain('node_modules');
    expect(dirNames).toContain('src');
    expect(dirNames).toContain('.git');
  });

  // A bare directory pattern must apply at every depth, not just the root.
  it('should exclude nested directories with same pattern', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']);

    // Find the nested directory
    const nestedDir = tree.find(entry => entry.name === 'nested');
    expect(nestedDir).toBeDefined();
    expect(nestedDir!.children).toBeDefined();

    // The nested/node_modules should also be excluded
    const nestedChildren = nestedDir!.children!.map(child => child.name);
    expect(nestedChildren).not.toContain('node_modules');
  });

  it('should handle glob patterns correctly', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, ['*.env']);
    const fileNames = tree.map(entry => entry.name);

    expect(fileNames).not.toContain('.env');
    expect(fileNames).toContain('.env.local'); // *.env should not match .env.local
    expect(fileNames).toContain('src');
  });

  it('should handle dot files correctly', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, ['.git']);
    const dirNames = tree.map(entry => entry.name);

    expect(dirNames).not.toContain('.git');
    expect(dirNames).toContain('.env'); // Should not exclude this
  });

  it('should work with multiple exclude patterns', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, ['node_modules', '.env', '.git']);
    const entryNames = tree.map(entry => entry.name);

    expect(entryNames).not.toContain('node_modules');
    expect(entryNames).not.toContain('.env');
    expect(entryNames).not.toContain('.git');
    expect(entryNames).toContain('src');
    expect(entryNames).toContain('package.json');
  });

  it('should handle empty exclude patterns', async () => {
    const tree = await buildTreeForTesting(testDir, testDir, []);
    const entryNames = tree.map(entry => entry.name);

    // All entries should be included
    expect(entryNames).toContain('node_modules');
    expect(entryNames).toContain('.env');
    expect(entryNames).toContain('.git');
    expect(entryNames).toContain('src');
  });
});
--------------------------------------------------------------------------------
/src/filesystem/__tests__/path-utils.test.ts:
--------------------------------------------------------------------------------
import { describe, it, expect } from '@jest/globals';
import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js';

// Covers cross-platform path handling: plain Unix paths, WSL (/mnt/<drive>)
// paths, Unix-style Windows (/<drive>) paths, and native Windows paths.
describe('Path Utilities', () => {
  describe('convertToWindowsPath', () => {
    it('leaves Unix paths unchanged', () => {
      expect(convertToWindowsPath('/usr/local/bin'))
        .toBe('/usr/local/bin');
      expect(convertToWindowsPath('/home/user/some path'))
        .toBe('/home/user/some path');
    });

    it('converts WSL paths to Windows format', () => {
      expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('converts Unix-style Windows paths to Windows format', () => {
      expect(convertToWindowsPath('/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('leaves Windows paths unchanged but ensures backslashes', () => {
      expect(convertToWindowsPath('C:\\NS\\MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
      expect(convertToWindowsPath('C:/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles Windows paths with spaces', () => {
      expect(convertToWindowsPath('C:\\Program Files\\Some App'))
        .toBe('C:\\Program Files\\Some App');
      expect(convertToWindowsPath('C:/Program Files/Some App'))
        .toBe('C:\\Program Files\\Some App');
    });

    it('handles uppercase and lowercase drive letters', () => {
      expect(convertToWindowsPath('/mnt/d/some/path'))
        .toBe('D:\\some\\path');
      expect(convertToWindowsPath('/d/some/path'))
        .toBe('D:\\some\\path');
    });
  });

  // normalizePath additionally strips quotes/whitespace and collapses
  // duplicate backslashes on top of the conversions above.
  describe('normalizePath', () => {
    it('preserves Unix paths', () => {
      expect(normalizePath('/usr/local/bin'))
        .toBe('/usr/local/bin');
      expect(normalizePath('/home/user/some path'))
        .toBe('/home/user/some path');
      expect(normalizePath('"/usr/local/some app/"'))
        .toBe('/usr/local/some app');
    });

    it('removes surrounding quotes', () => {
      expect(normalizePath('"C:\\NS\\My Kindle Content"'))
        .toBe('C:\\NS\\My Kindle Content');
    });

    it('normalizes backslashes', () => {
      expect(normalizePath('C:\\\\NS\\\\MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('converts forward slashes to backslashes on Windows', () => {
      expect(normalizePath('C:/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles WSL paths', () => {
      expect(normalizePath('/mnt/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles Unix-style Windows paths', () => {
      expect(normalizePath('/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles paths with spaces and mixed slashes', () => {
      expect(normalizePath('C:/NS/My Kindle Content'))
        .toBe('C:\\NS\\My Kindle Content');
      expect(normalizePath('/mnt/c/NS/My Kindle Content'))
        .toBe('C:\\NS\\My Kindle Content');
      expect(normalizePath('C:\\Program Files (x86)\\App Name'))
        .toBe('C:\\Program Files (x86)\\App Name');
      expect(normalizePath('"C:\\Program Files\\App Name"'))
        .toBe('C:\\Program Files\\App Name');
      expect(normalizePath(' C:\\Program Files\\App Name '))
        .toBe('C:\\Program Files\\App Name');
    });

    it('preserves spaces in all path formats', () => {
      expect(normalizePath('/mnt/c/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
      expect(normalizePath('/c/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
      expect(normalizePath('C:/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
    });

    it('handles special characters in paths', () => {
      // Test ampersand in path
      expect(normalizePath('C:\\NS\\Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');
      expect(normalizePath('C:/NS/Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');
      expect(normalizePath('/mnt/c/NS/Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');

      // Test tilde in path (short names in Windows)
      expect(normalizePath('C:\\NS\\MYKIND~1'))
        .toBe('C:\\NS\\MYKIND~1');
      expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'))
        .toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1');

      // Test other special characters
      expect(normalizePath('C:\\Path with #hash'))
        .toBe('C:\\Path with #hash');
      expect(normalizePath('C:\\Path with (parentheses)'))
        .toBe('C:\\Path with (parentheses)');
      expect(normalizePath('C:\\Path with [brackets]'))
        .toBe('C:\\Path with [brackets]');
      expect(normalizePath('C:\\Path with @at+plus$dollar%percent'))
        .toBe('C:\\Path with @at+plus$dollar%percent');
    });

    it('capitalizes lowercase drive letters for Windows paths', () => {
      expect(normalizePath('c:/windows/system32'))
        .toBe('C:\\windows\\system32');
      expect(normalizePath('/mnt/d/my/folder')) // WSL path with lowercase drive
        .toBe('D:\\my\\folder');
      expect(normalizePath('/e/another/folder')) // Unix-style Windows path with lowercase drive
        .toBe('E:\\another\\folder');
    });

    it('handles UNC paths correctly', () => {
      // UNC paths should preserve the leading double backslash
      const uncPath = '\\\\SERVER\\share\\folder';
      expect(normalizePath(uncPath)).toBe('\\\\SERVER\\share\\folder');

      // Test UNC path with double backslashes that need normalization
      const uncPathWithDoubles = '\\\\\\\\SERVER\\\\share\\\\folder';
      expect(normalizePath(uncPathWithDoubles)).toBe('\\\\SERVER\\share\\folder');
    });

    it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => {
      // Relative path
      const relativePath = 'some/relative/path';
      expect(normalizePath(relativePath)).toBe(relativePath.replace(/\//g, '\\'));

      // A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion
      const otherAbsolutePath = '\\someserver\\share\\file';
      expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath);
    });
  });

  describe('expandHome', () => {
    it('expands ~ to home directory', () => {
      const result = expandHome('~/test');
      expect(result).toContain('test');
      expect(result).not.toContain('~');
    });

    it('expands bare ~ to home directory', () => {
      const result = expandHome('~');
      expect(result).not.toContain('~');
      expect(result.length).toBeGreaterThan(0);
    });

    it('leaves other paths unchanged', () => {
      expect(expandHome('C:/test')).toBe('C:/test');
    });
  });
});
--------------------------------------------------------------------------------
/src/filesystem/__tests__/roots-utils.test.ts:
--------------------------------------------------------------------------------
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
import { getValidRootDirectories } from '../roots-utils.js';
import { mkdtempSync, rmSync, mkdirSync, writeFileSync, realpathSync } from 'fs';
import { tmpdir } from 'os';
import { join } from 'path';
import type { Root } from '@modelcontextprotocol/sdk/types.js';

describe('getValidRootDirectories', () => {
  let testDir1: string;
  let testDir2: string;
  let testDir3: string;
  let testFile: string;

  beforeEach(() => {
    // Create test directories
    // realpathSync resolves tmpdir symlinks (e.g. /tmp on macOS) so the
    // expected values match what getValidRootDirectories returns.
    testDir1 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test1-')));
    testDir2 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test2-')));
    testDir3 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test3-')));

    // Create a test file (not a directory)
    testFile = join(testDir1, 'test-file.txt');
    writeFileSync(testFile, 'test content');
  });

  afterEach(() => {
    // Cleanup
    rmSync(testDir1, { recursive: true, force: true });
    rmSync(testDir2, { recursive: true, force: true });
    rmSync(testDir3, { recursive: true, force: true });
  });

  describe('valid directory processing', () => {
    it('should process all URI formats and edge cases', async () => {
      const roots = [
        { uri: `file://${testDir1}`, name: 'File URI' },
        { uri: testDir2, name: 'Plain path' },
        { uri: testDir3 } // Plain path without name property
      ];

      const result = await getValidRootDirectories(roots);

      expect(result).toContain(testDir1);
      expect(result).toContain(testDir2);
      expect(result).toContain(testDir3);
      expect(result).toHaveLength(3);
    });

    it('should normalize complex paths', async () => {
      const subDir = join(testDir1, 'subdir');
      mkdirSync(subDir);

      // "." and ".." segments should be resolved away.
      const roots = [
        { uri: `file://${testDir1}/./subdir/../subdir`, name: 'Complex Path' }
      ];

      const result = await getValidRootDirectories(roots);

      expect(result).toHaveLength(1);
      expect(result[0]).toBe(subDir);
    });
  });

  describe('error handling', () => {

    // Invalid roots are skipped rather than failing the whole call.
    it('should handle various error types', async () => {
      const nonExistentDir = join(tmpdir(), 'non-existent-directory-12345');
      const invalidPath = '\0invalid\0path'; // Null bytes cause different error types
      const roots = [
        { uri: `file://${testDir1}`, name: 'Valid Dir' },
        { uri: `file://${nonExistentDir}`, name: 'Non-existent Dir' },
        { uri: `file://${testFile}`, name: 'File Not Dir' },
        { uri: `file://${invalidPath}`, name: 'Invalid Path' }
      ];

      const result = await getValidRootDirectories(roots);

      expect(result).toContain(testDir1);
      expect(result).not.toContain(nonExistentDir);
      expect(result).not.toContain(testFile);
      expect(result).not.toContain(invalidPath);
      expect(result).toHaveLength(1);
    });
  });
});
--------------------------------------------------------------------------------
/src/filesystem/jest.config.cjs:
--------------------------------------------------------------------------------
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  extensionsToTreatAsEsm: ['.ts'],
  // Map ESM-style ".js" import specifiers back to their ".ts" sources.
  moduleNameMapper: {
    '^(\\.{1,2}/.*)\\.js$': '$1',
  },
  transform: {
    '^.+\\.tsx?$': [
      'ts-jest',
      {
        useESM: true,
      },
    ],
  },
  testMatch: ['**/__tests__/**/*.test.ts'],
  collectCoverageFrom: [
    '**/*.ts',
    '!**/__tests__/**',
    '!**/dist/**',
  ],
}
--------------------------------------------------------------------------------
/src/filesystem/lib.ts:
--------------------------------------------------------------------------------
import fs from "fs/promises";
import path from "path";
import os from 'os';
import { randomBytes } from 'crypto';
import { diffLines, createTwoFilesPatch } from 'diff';
import { minimatch } from 'minimatch';
import { normalizePath, expandHome } from './path-utils.js';
import { isPathWithinAllowedDirectories } from './path-validation.js';

// Global allowed directories - set by the main module
let allowedDirectories: string[] = [];

/**
 * Replaces the global allowed-directory list. Called by the main module at
 * startup and whenever the client's roots change; a copy is stored so later
 * mutation of the caller's array cannot affect validation.
 */
export function setAllowedDirectories(directories: string[]): void {
  allowedDirectories = [...directories];
}

/** Returns a defensive copy of the current allowed-directory list. */
export function getAllowedDirectories(): string[] {
  return [...allowedDirectories];
}

// Type definitions

// Metadata bundle returned by getFileStats.
interface FileInfo {
  size: number;
  created: Date;
  modified: Date;
  accessed: Date;
  isDirectory: boolean;
  isFile: boolean;
  permissions: string;
}

export interface SearchOptions {
  excludePatterns?: string[];
}

export interface SearchResult {
  path: string;
  isDirectory: boolean;
}

// Pure Utility Functions

/**
 * Formats a byte count as a human-readable string, e.g. "1.50 KB".
 * Units beyond TB are clamped to TB.
 * NOTE(review): negative byte counts produce NaN indexing here — callers
 * pass fs.stat sizes, which are non-negative; confirm before reusing elsewhere.
 */
export function formatSize(bytes: number): string {
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  if (bytes === 0) return '0 B';

  const i = Math.floor(Math.log(bytes) / Math.log(1024));

  if (i < 0 || i === 0) return `${bytes} ${units[0]}`;

  const unitIndex = Math.min(i, units.length - 1);
  return `${(bytes / Math.pow(1024, unitIndex)).toFixed(2)} ${units[unitIndex]}`;
}

/** Converts CRLF line endings to LF so comparisons and diffs are stable. */
export function normalizeLineEndings(text: string): string {
  return text.replace(/\r\n/g, '\n');
}

/**
 * Produces a unified diff between two versions of a file's content.
 * Line endings are normalized first so CRLF/LF differences don't appear as edits.
 */
export function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
  // Ensure consistent line endings for diff
  const normalizedOriginal = normalizeLineEndings(originalContent);
  const normalizedNew = normalizeLineEndings(newContent);

  return createTwoFilesPatch(
    filepath,
    filepath,
    normalizedOriginal,
    normalizedNew,
    'original',
    'modified'
  );
}

// Security & Validation Functions

/**
 * Validates that a requested path lies inside the allowed directories.
 *
 * The path is ~-expanded and resolved to an absolute path, checked against the
 * allowlist, then its symlink target (or, for not-yet-existing files, its
 * parent directory's real path) is checked as well.
 *
 * Returns the real path for existing files, or the resolved absolute path for
 * new files whose parent directory exists and is allowed.
 * Throws if any of these checks fail.
 */
export async function validatePath(requestedPath: string): Promise<string> {
  const expandedPath = expandHome(requestedPath);
  const absolute = path.isAbsolute(expandedPath)
    ? path.resolve(expandedPath)
    : path.resolve(process.cwd(), expandedPath);

  const normalizedRequested = normalizePath(absolute);

  // Security: Check if path is within allowed directories before any file operations
  const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
  if (!isAllowed) {
    throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
  }

  // Security: Handle symlinks by checking their real path to prevent symlink attacks
  // This prevents attackers from creating symlinks that point outside allowed directories
  try {
    const realPath = await fs.realpath(absolute);
    const normalizedReal = normalizePath(realPath);
    if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
      throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
    }
    return realPath;
  } catch (error) {
    // Security: For new files that don't exist yet, verify parent directory
    // This ensures we can't create files in unauthorized locations
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      const parentDir = path.dirname(absolute);
      try {
        const realParentPath = await fs.realpath(parentDir);
        const normalizedParent = normalizePath(realParentPath);
        if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
          throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
        }
        return absolute;
      } catch {
        throw new Error(`Parent directory does not exist: ${parentDir}`);
      }
    }
    throw error;
  }
}


// File Operations

/** Stats a path and returns its size, timestamps, kind, and octal permissions. */
export async function getFileStats(filePath: string): Promise<FileInfo> {
  const stats = await fs.stat(filePath);
  return {
    size: stats.size,
    created: stats.birthtime,
    modified: stats.mtime,
    accessed: stats.atime,
    isDirectory: stats.isDirectory(),
    isFile: stats.isFile(),
    // Last three octal digits of the mode, e.g. "644"
    permissions: stats.mode.toString(8).slice(-3),
  };
}

/** Reads a file as text; encoding defaults to UTF-8. */
export async function readFileContent(filePath: string, encoding: string = 'utf-8'): Promise<string> {
  return await fs.readFile(filePath, encoding as BufferEncoding);
}

/**
 * Writes a file without following pre-existing symlinks.
 * New files are created exclusively; existing files are replaced via an
 * atomic temp-file + rename to avoid symlink race conditions.
 */
export async function writeFileContent(filePath: string, content: string): Promise<void> {
  try {
    // Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
    // preventing writes through pre-existing symlinks
    await fs.writeFile(filePath, content, { encoding: "utf-8", flag: 'wx' });
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
      // Security: Use atomic rename to prevent race conditions where symlinks
      // could be created between validation and write. Rename operations
      // replace the target file atomically and don't follow symlinks.
      const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
      try {
        await fs.writeFile(tempPath, content, 'utf-8');
        await fs.rename(tempPath, filePath);
      } catch (renameError) {
        // Best-effort cleanup of the temp file before re-throwing.
        try {
          await fs.unlink(tempPath);
        } catch {}
        throw renameError;
      }
    } else {
      throw error;
    }
  }
}


// File Editing Functions

// A single search-and-replace operation for applyFileEdits.
interface FileEdit {
  oldText: string;
  newText: string;
}

/**
 * Applies a sequence of text edits to a file. Each edit is applied by exact
 * substring match first; failing that, a line-by-line match that tolerates
 * leading/trailing whitespace differences is attempted.
 * When dryRun is true the file is not modified.
 */
export async function applyFileEdits(
  filePath: string,
  edits: FileEdit[],
  dryRun: boolean = false
): Promise<string> {
  // Read file content and normalize line endings
  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));

  // Apply edits sequentially
  let modifiedContent = content;
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText);
    const normalizedNew = normalizeLineEndings(edit.newText);

    // If exact match exists, use it
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
      continue;
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace
    const oldLines = normalizedOld.split('\n');
    const contentLines = modifiedContent.split('\n');
    let matchFound = false;

    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length);

      // Compare lines with normalized whitespace
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j];
        return oldLine.trim() === contentLine.trim();
      });

      if (isMatch) {
        // Preserve original indentation of first line
        const
originalIndent = contentLines[i].match(/^\s*/)?.[0] || ''; 208 | const newLines = normalizedNew.split('\n').map((line, j) => { 209 | if (j === 0) return originalIndent + line.trimStart(); 210 | // For subsequent lines, try to preserve relative indentation 211 | const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || ''; 212 | const newIndent = line.match(/^\s*/)?.[0] || ''; 213 | if (oldIndent && newIndent) { 214 | const relativeIndent = newIndent.length - oldIndent.length; 215 | return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart(); 216 | } 217 | return line; 218 | }); 219 | 220 | contentLines.splice(i, oldLines.length, ...newLines); 221 | modifiedContent = contentLines.join('\n'); 222 | matchFound = true; 223 | break; 224 | } 225 | } 226 | 227 | if (!matchFound) { 228 | throw new Error(`Could not find exact match for edit:\n${edit.oldText}`); 229 | } 230 | } 231 | 232 | // Create unified diff 233 | const diff = createUnifiedDiff(content, modifiedContent, filePath); 234 | 235 | // Format diff with appropriate number of backticks 236 | let numBackticks = 3; 237 | while (diff.includes('`'.repeat(numBackticks))) { 238 | numBackticks++; 239 | } 240 | const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`; 241 | 242 | if (!dryRun) { 243 | // Security: Use atomic rename to prevent race conditions where symlinks 244 | // could be created between validation and write. Rename operations 245 | // replace the target file atomically and don't follow symlinks. 
246 | const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`; 247 | try { 248 | await fs.writeFile(tempPath, modifiedContent, 'utf-8'); 249 | await fs.rename(tempPath, filePath); 250 | } catch (error) { 251 | try { 252 | await fs.unlink(tempPath); 253 | } catch {} 254 | throw error; 255 | } 256 | } 257 | 258 | return formattedDiff; 259 | } 260 | 261 | // Memory-efficient implementation to get the last N lines of a file 262 | export async function tailFile(filePath: string, numLines: number): Promise { 263 | const CHUNK_SIZE = 1024; // Read 1KB at a time 264 | const stats = await fs.stat(filePath); 265 | const fileSize = stats.size; 266 | 267 | if (fileSize === 0) return ''; 268 | 269 | // Open file for reading 270 | const fileHandle = await fs.open(filePath, 'r'); 271 | try { 272 | const lines: string[] = []; 273 | let position = fileSize; 274 | let chunk = Buffer.alloc(CHUNK_SIZE); 275 | let linesFound = 0; 276 | let remainingText = ''; 277 | 278 | // Read chunks from the end of the file until we have enough lines 279 | while (position > 0 && linesFound < numLines) { 280 | const size = Math.min(CHUNK_SIZE, position); 281 | position -= size; 282 | 283 | const { bytesRead } = await fileHandle.read(chunk, 0, size, position); 284 | if (!bytesRead) break; 285 | 286 | // Get the chunk as a string and prepend any remaining text from previous iteration 287 | const readData = chunk.slice(0, bytesRead).toString('utf-8'); 288 | const chunkText = readData + remainingText; 289 | 290 | // Split by newlines and count 291 | const chunkLines = normalizeLineEndings(chunkText).split('\n'); 292 | 293 | // If this isn't the end of the file, the first line is likely incomplete 294 | // Save it to prepend to the next chunk 295 | if (position > 0) { 296 | remainingText = chunkLines[0]; 297 | chunkLines.shift(); // Remove the first (incomplete) line 298 | } 299 | 300 | // Add lines to our result (up to the number we need) 301 | for (let i = chunkLines.length - 1; i >= 0 && 
linesFound < numLines; i--) { 302 | lines.unshift(chunkLines[i]); 303 | linesFound++; 304 | } 305 | } 306 | 307 | return lines.join('\n'); 308 | } finally { 309 | await fileHandle.close(); 310 | } 311 | } 312 | 313 | // New function to get the first N lines of a file 314 | export async function headFile(filePath: string, numLines: number): Promise { 315 | const fileHandle = await fs.open(filePath, 'r'); 316 | try { 317 | const lines: string[] = []; 318 | let buffer = ''; 319 | let bytesRead = 0; 320 | const chunk = Buffer.alloc(1024); // 1KB buffer 321 | 322 | // Read chunks and count lines until we have enough or reach EOF 323 | while (lines.length < numLines) { 324 | const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead); 325 | if (result.bytesRead === 0) break; // End of file 326 | bytesRead += result.bytesRead; 327 | buffer += chunk.slice(0, result.bytesRead).toString('utf-8'); 328 | 329 | const newLineIndex = buffer.lastIndexOf('\n'); 330 | if (newLineIndex !== -1) { 331 | const completeLines = buffer.slice(0, newLineIndex).split('\n'); 332 | buffer = buffer.slice(newLineIndex + 1); 333 | for (const line of completeLines) { 334 | lines.push(line); 335 | if (lines.length >= numLines) break; 336 | } 337 | } 338 | } 339 | 340 | // If there is leftover content and we still need lines, add it 341 | if (buffer.length > 0 && lines.length < numLines) { 342 | lines.push(buffer); 343 | } 344 | 345 | return lines.join('\n'); 346 | } finally { 347 | await fileHandle.close(); 348 | } 349 | } 350 | 351 | export async function searchFilesWithValidation( 352 | rootPath: string, 353 | pattern: string, 354 | allowedDirectories: string[], 355 | options: SearchOptions = {} 356 | ): Promise { 357 | const { excludePatterns = [] } = options; 358 | const results: string[] = []; 359 | 360 | async function search(currentPath: string) { 361 | const entries = await fs.readdir(currentPath, { withFileTypes: true }); 362 | 363 | for (const entry of entries) { 364 | const 
fullPath = path.join(currentPath, entry.name); 365 | 366 | try { 367 | await validatePath(fullPath); 368 | 369 | const relativePath = path.relative(rootPath, fullPath); 370 | const shouldExclude = excludePatterns.some(excludePattern => 371 | minimatch(relativePath, excludePattern, { dot: true }) 372 | ); 373 | 374 | if (shouldExclude) continue; 375 | 376 | // Use glob matching for the search pattern 377 | if (minimatch(relativePath, pattern, { dot: true })) { 378 | results.push(fullPath); 379 | } 380 | 381 | if (entry.isDirectory()) { 382 | await search(fullPath); 383 | } 384 | } catch { 385 | continue; 386 | } 387 | } 388 | } 389 | 390 | await search(rootPath); 391 | return results; 392 | } 393 | -------------------------------------------------------------------------------- /src/filesystem/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-filesystem", 3 | "version": "0.6.3", 4 | "description": "MCP server for filesystem access", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-filesystem": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch", 20 | "test": "jest --config=jest.config.cjs --coverage" 21 | }, 22 | "dependencies": { 23 | "@modelcontextprotocol/sdk": "^1.17.0", 24 | "diff": "^5.1.0", 25 | "glob": "^10.3.10", 26 | "minimatch": "^10.0.1", 27 | "zod-to-json-schema": "^3.23.5" 28 | }, 29 | "devDependencies": { 30 | "@jest/globals": "^29.7.0", 31 | "@types/diff": "^5.0.9", 32 | "@types/jest": "^29.5.14", 33 | "@types/minimatch": "^5.1.2", 34 | "@types/node": "^22", 35 | "jest": "^29.7.0", 36 | "shx": "^0.3.4", 37 | "ts-jest": 
"^29.1.1", 38 | "ts-node": "^10.9.2", 39 | "typescript": "^5.8.2" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/filesystem/path-utils.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import os from 'os'; 3 | 4 | /** 5 | * Converts WSL or Unix-style Windows paths to Windows format 6 | * @param p The path to convert 7 | * @returns Converted Windows path 8 | */ 9 | export function convertToWindowsPath(p: string): string { 10 | // Handle WSL paths (/mnt/c/...) 11 | if (p.startsWith('/mnt/')) { 12 | const driveLetter = p.charAt(5).toUpperCase(); 13 | const pathPart = p.slice(6).replace(/\//g, '\\'); 14 | return `${driveLetter}:${pathPart}`; 15 | } 16 | 17 | // Handle Unix-style Windows paths (/c/...) 18 | if (p.match(/^\/[a-zA-Z]\//)) { 19 | const driveLetter = p.charAt(1).toUpperCase(); 20 | const pathPart = p.slice(2).replace(/\//g, '\\'); 21 | return `${driveLetter}:${pathPart}`; 22 | } 23 | 24 | // Handle standard Windows paths, ensuring backslashes 25 | if (p.match(/^[a-zA-Z]:/)) { 26 | return p.replace(/\//g, '\\'); 27 | } 28 | 29 | // Leave non-Windows paths unchanged 30 | return p; 31 | } 32 | 33 | /** 34 | * Normalizes path by standardizing format while preserving OS-specific behavior 35 | * @param p The path to normalize 36 | * @returns Normalized path 37 | */ 38 | export function normalizePath(p: string): string { 39 | // Remove any surrounding quotes and whitespace 40 | p = p.trim().replace(/^["']|["']$/g, ''); 41 | 42 | // Check if this is a Unix path (starts with / but not a Windows or WSL path) 43 | const isUnixPath = p.startsWith('/') && 44 | !p.match(/^\/mnt\/[a-z]\//i) && 45 | !p.match(/^\/[a-zA-Z]\//); 46 | 47 | if (isUnixPath) { 48 | // For Unix paths, just normalize without converting to Windows format 49 | // Replace double slashes with single slashes and remove trailing slashes 50 | return p.replace(/\/+/g, 
'/').replace(/\/+$/, ''); 51 | } 52 | 53 | // Convert WSL or Unix-style Windows paths to Windows format 54 | p = convertToWindowsPath(p); 55 | 56 | // Handle double backslashes, preserving leading UNC \\ 57 | if (p.startsWith('\\\\')) { 58 | // For UNC paths, first normalize any excessive leading backslashes to exactly \\ 59 | // Then normalize double backslashes in the rest of the path 60 | let uncPath = p; 61 | // Replace multiple leading backslashes with exactly two 62 | uncPath = uncPath.replace(/^\\{2,}/, '\\\\'); 63 | // Now normalize any remaining double backslashes in the rest of the path 64 | const restOfPath = uncPath.substring(2).replace(/\\\\/g, '\\'); 65 | p = '\\\\' + restOfPath; 66 | } else { 67 | // For non-UNC paths, normalize all double backslashes 68 | p = p.replace(/\\\\/g, '\\'); 69 | } 70 | 71 | // Use Node's path normalization, which handles . and .. segments 72 | let normalized = path.normalize(p); 73 | 74 | // Fix UNC paths after normalization (path.normalize can remove a leading backslash) 75 | if (p.startsWith('\\\\') && !normalized.startsWith('\\\\')) { 76 | normalized = '\\' + normalized; 77 | } 78 | 79 | // Handle Windows paths: convert slashes and ensure drive letter is capitalized 80 | if (normalized.match(/^[a-zA-Z]:/)) { 81 | let result = normalized.replace(/\//g, '\\'); 82 | // Capitalize drive letter if present 83 | if (/^[a-z]:/.test(result)) { 84 | result = result.charAt(0).toUpperCase() + result.slice(1); 85 | } 86 | return result; 87 | } 88 | 89 | // For all other paths (including relative paths), convert forward slashes to backslashes 90 | // This ensures relative paths like "some/relative/path" become "some\\relative\\path" 91 | return normalized.replace(/\//g, '\\'); 92 | } 93 | 94 | /** 95 | * Expands home directory tildes in paths 96 | * @param filepath The path to expand 97 | * @returns Expanded path 98 | */ 99 | export function expandHome(filepath: string): string { 100 | if (filepath.startsWith('~/') || filepath === 
'~') { 101 | return path.join(os.homedir(), filepath.slice(1)); 102 | } 103 | return filepath; 104 | } 105 | -------------------------------------------------------------------------------- /src/filesystem/path-validation.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | 3 | /** 4 | * Checks if an absolute path is within any of the allowed directories. 5 | * 6 | * @param absolutePath - The absolute path to check (will be normalized) 7 | * @param allowedDirectories - Array of absolute allowed directory paths (will be normalized) 8 | * @returns true if the path is within an allowed directory, false otherwise 9 | * @throws Error if given relative paths after normalization 10 | */ 11 | export function isPathWithinAllowedDirectories(absolutePath: string, allowedDirectories: string[]): boolean { 12 | // Type validation 13 | if (typeof absolutePath !== 'string' || !Array.isArray(allowedDirectories)) { 14 | return false; 15 | } 16 | 17 | // Reject empty inputs 18 | if (!absolutePath || allowedDirectories.length === 0) { 19 | return false; 20 | } 21 | 22 | // Reject null bytes (forbidden in paths) 23 | if (absolutePath.includes('\x00')) { 24 | return false; 25 | } 26 | 27 | // Normalize the input path 28 | let normalizedPath: string; 29 | try { 30 | normalizedPath = path.resolve(path.normalize(absolutePath)); 31 | } catch { 32 | return false; 33 | } 34 | 35 | // Verify it's absolute after normalization 36 | if (!path.isAbsolute(normalizedPath)) { 37 | throw new Error('Path must be absolute after normalization'); 38 | } 39 | 40 | // Check against each allowed directory 41 | return allowedDirectories.some(dir => { 42 | if (typeof dir !== 'string' || !dir) { 43 | return false; 44 | } 45 | 46 | // Reject null bytes in allowed dirs 47 | if (dir.includes('\x00')) { 48 | return false; 49 | } 50 | 51 | // Normalize the allowed directory 52 | let normalizedDir: string; 53 | try { 54 | normalizedDir = 
path.resolve(path.normalize(dir)); 55 | } catch { 56 | return false; 57 | } 58 | 59 | // Verify allowed directory is absolute after normalization 60 | if (!path.isAbsolute(normalizedDir)) { 61 | throw new Error('Allowed directories must be absolute paths after normalization'); 62 | } 63 | 64 | // Check if normalizedPath is within normalizedDir 65 | // Path is inside if it's the same or a subdirectory 66 | if (normalizedPath === normalizedDir) { 67 | return true; 68 | } 69 | 70 | // Special case for root directory to avoid double slash 71 | // On Windows, we need to check if both paths are on the same drive 72 | if (normalizedDir === path.sep) { 73 | return normalizedPath.startsWith(path.sep); 74 | } 75 | 76 | // On Windows, also check for drive root (e.g., "C:\") 77 | if (path.sep === '\\' && normalizedDir.match(/^[A-Za-z]:\\?$/)) { 78 | // Ensure both paths are on the same drive 79 | const dirDrive = normalizedDir.charAt(0).toLowerCase(); 80 | const pathDrive = normalizedPath.charAt(0).toLowerCase(); 81 | return pathDrive === dirDrive && normalizedPath.startsWith(normalizedDir.replace(/\\?$/, '\\')); 82 | } 83 | 84 | return normalizedPath.startsWith(normalizedDir + path.sep); 85 | }); 86 | } 87 | -------------------------------------------------------------------------------- /src/filesystem/roots-utils.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, type Stats } from 'fs'; 2 | import path from 'path'; 3 | import os from 'os'; 4 | import { normalizePath } from './path-utils.js'; 5 | import type { Root } from '@modelcontextprotocol/sdk/types.js'; 6 | 7 | /** 8 | * Converts a root URI to a normalized directory path with basic security validation. 9 | * @param rootUri - File URI (file://...) 
or plain directory path 10 | * @returns Promise resolving to validated path or null if invalid 11 | */ 12 | async function parseRootUri(rootUri: string): Promise { 13 | try { 14 | const rawPath = rootUri.startsWith('file://') ? rootUri.slice(7) : rootUri; 15 | const expandedPath = rawPath.startsWith('~/') || rawPath === '~' 16 | ? path.join(os.homedir(), rawPath.slice(1)) 17 | : rawPath; 18 | const absolutePath = path.resolve(expandedPath); 19 | const resolvedPath = await fs.realpath(absolutePath); 20 | return normalizePath(resolvedPath); 21 | } catch { 22 | return null; // Path doesn't exist or other error 23 | } 24 | } 25 | 26 | /** 27 | * Formats error message for directory validation failures. 28 | * @param dir - Directory path that failed validation 29 | * @param error - Error that occurred during validation 30 | * @param reason - Specific reason for failure 31 | * @returns Formatted error message 32 | */ 33 | function formatDirectoryError(dir: string, error?: unknown, reason?: string): string { 34 | if (reason) { 35 | return `Skipping ${reason}: ${dir}`; 36 | } 37 | const message = error instanceof Error ? error.message : String(error); 38 | return `Skipping invalid directory: ${dir} due to error: ${message}`; 39 | } 40 | 41 | /** 42 | * Resolves requested root directories from MCP root specifications. 43 | * 44 | * Converts root URI specifications (file:// URIs or plain paths) into normalized 45 | * directory paths, validating that each path exists and is a directory. 46 | * Includes symlink resolution for security. 
47 | * 48 | * @param requestedRoots - Array of root specifications with URI and optional name 49 | * @returns Promise resolving to array of validated directory paths 50 | */ 51 | export async function getValidRootDirectories( 52 | requestedRoots: readonly Root[] 53 | ): Promise { 54 | const validatedDirectories: string[] = []; 55 | 56 | for (const requestedRoot of requestedRoots) { 57 | const resolvedPath = await parseRootUri(requestedRoot.uri); 58 | if (!resolvedPath) { 59 | console.error(formatDirectoryError(requestedRoot.uri, undefined, 'invalid path or inaccessible')); 60 | continue; 61 | } 62 | 63 | try { 64 | const stats: Stats = await fs.stat(resolvedPath); 65 | if (stats.isDirectory()) { 66 | validatedDirectories.push(resolvedPath); 67 | } else { 68 | console.error(formatDirectoryError(resolvedPath, undefined, 'non-directory root')); 69 | } 70 | } catch (error) { 71 | console.error(formatDirectoryError(resolvedPath, error)); 72 | } 73 | } 74 | 75 | return validatedDirectories; 76 | } -------------------------------------------------------------------------------- /src/filesystem/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "moduleResolution": "NodeNext", 7 | "module": "NodeNext" 8 | }, 9 | "include": [ 10 | "./**/*.ts" 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/git/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .venv 3 | -------------------------------------------------------------------------------- /src/git/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/git/Dockerfile: 
-------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . /app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | RUN apt-get update && apt-get install -y git git-lfs && rm -rf /var/lib/apt/lists/* \ 28 | && git lfs install --system 29 | 30 | WORKDIR /app 31 | 32 | COPY --from=uv /root/.local /root/.local 33 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 34 | 35 | # Place executables in the environment at the front of the path 36 | ENV PATH="/app/.venv/bin:$PATH" 37 | 38 | # when running the container, add --db-path and a bind mount to the host's db file 39 | ENTRYPOINT ["mcp-server-git"] 40 | -------------------------------------------------------------------------------- /src/git/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /src/git/README.md: -------------------------------------------------------------------------------- 1 | # mcp-server-git: A git MCP server 2 | 3 | ## Overview 4 | 5 | A Model Context Protocol server for Git repository interaction and automation. This server provides tools to read, search, and manipulate Git repositories via Large Language Models. 6 | 7 | Please note that mcp-server-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server. 8 | 9 | ### Tools 10 | 11 | 1. `git_status` 12 | - Shows the working tree status 13 | - Input: 14 | - `repo_path` (string): Path to Git repository 15 | - Returns: Current status of working directory as text output 16 | 17 | 2. 
`git_diff_unstaged` 18 | - Shows changes in working directory not yet staged 19 | - Inputs: 20 | - `repo_path` (string): Path to Git repository 21 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 22 | - Returns: Diff output of unstaged changes 23 | 24 | 3. `git_diff_staged` 25 | - Shows changes that are staged for commit 26 | - Inputs: 27 | - `repo_path` (string): Path to Git repository 28 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 29 | - Returns: Diff output of staged changes 30 | 31 | 4. `git_diff` 32 | - Shows differences between branches or commits 33 | - Inputs: 34 | - `repo_path` (string): Path to Git repository 35 | - `target` (string): Target branch or commit to compare with 36 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 37 | - Returns: Diff output comparing current state with target 38 | 39 | 5. `git_commit` 40 | - Records changes to the repository 41 | - Inputs: 42 | - `repo_path` (string): Path to Git repository 43 | - `message` (string): Commit message 44 | - Returns: Confirmation with new commit hash 45 | 46 | 6. `git_add` 47 | - Adds file contents to the staging area 48 | - Inputs: 49 | - `repo_path` (string): Path to Git repository 50 | - `files` (string[]): Array of file paths to stage 51 | - Returns: Confirmation of staged files 52 | 53 | 7. `git_reset` 54 | - Unstages all staged changes 55 | - Input: 56 | - `repo_path` (string): Path to Git repository 57 | - Returns: Confirmation of reset operation 58 | 59 | 8. `git_log` 60 | - Shows the commit logs with optional date filtering 61 | - Inputs: 62 | - `repo_path` (string): Path to Git repository 63 | - `max_count` (number, optional): Maximum number of commits to show (default: 10) 64 | - `start_timestamp` (string, optional): Start timestamp for filtering commits. 
Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024') 65 | - `end_timestamp` (string, optional): End timestamp for filtering commits. Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024') 66 | - Returns: Array of commit entries with hash, author, date, and message 67 | 68 | 9. `git_create_branch` 69 | - Creates a new branch 70 | - Inputs: 71 | - `repo_path` (string): Path to Git repository 72 | - `branch_name` (string): Name of the new branch 73 | - `start_point` (string, optional): Starting point for the new branch 74 | - Returns: Confirmation of branch creation 75 | 10. `git_checkout` 76 | - Switches branches 77 | - Inputs: 78 | - `repo_path` (string): Path to Git repository 79 | - `branch_name` (string): Name of branch to checkout 80 | - Returns: Confirmation of branch switch 81 | 11. `git_show` 82 | - Shows the contents of a commit 83 | - Inputs: 84 | - `repo_path` (string): Path to Git repository 85 | - `revision` (string): The revision (commit hash, branch name, tag) to show 86 | - Returns: Contents of the specified commit 87 | 12. `git_init` 88 | - Initializes a Git repository 89 | - Inputs: 90 | - `repo_path` (string): Path to directory to initialize git repo 91 | - Returns: Confirmation of repository initialization 92 | 93 | 13. `git_branch` 94 | - List Git branches 95 | - Inputs: 96 | - `repo_path` (string): Path to the Git repository. 97 | - `branch_type` (string): Whether to list local branches ('local'), remote branches ('remote') or all branches('all'). 98 | - `contains` (string, optional): The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified 99 | - `not_contains` (string, optional): The commit sha that branch should NOT contain. 
Do not pass anything to this param if no commit sha is specified 100 | - Returns: List of branches 101 | 102 | ## Installation 103 | 104 | ### Using uv (recommended) 105 | 106 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 107 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-git*. 108 | 109 | ### Using PIP 110 | 111 | Alternatively you can install `mcp-server-git` via pip: 112 | 113 | ``` 114 | pip install mcp-server-git 115 | ``` 116 | 117 | After installation, you can run it as a script using: 118 | 119 | ``` 120 | python -m mcp_server_git 121 | ``` 122 | 123 | ## Configuration 124 | 125 | ### Usage with Claude Desktop 126 | 127 | Add this to your `claude_desktop_config.json`: 128 | 129 |
130 | Using uvx 131 | 132 | ```json 133 | "mcpServers": { 134 | "git": { 135 | "command": "uvx", 136 | "args": ["mcp-server-git", "--repository", "path/to/git/repo"] 137 | } 138 | } 139 | ``` 140 |
141 | 142 |
143 | Using docker 144 | 145 | * Note: replace '/Users/username' with the a path that you want to be accessible by this tool 146 | 147 | ```json 148 | "mcpServers": { 149 | "git": { 150 | "command": "docker", 151 | "args": ["run", "--rm", "-i", "--mount", "type=bind,src=/Users/username,dst=/Users/username", "mcp/git"] 152 | } 153 | } 154 | ``` 155 |
156 | 157 |
158 | Using pip installation 159 | 160 | ```json 161 | "mcpServers": { 162 | "git": { 163 | "command": "python", 164 | "args": ["-m", "mcp_server_git", "--repository", "path/to/git/repo"] 165 | } 166 | } 167 | ``` 168 |
169 | 170 | ### Usage with VS Code 171 | 172 | For quick installation, use one of the one-click install buttons below... 173 | 174 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D&quality=insiders) 175 | 176 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D&quality=insiders) 177 | 178 | For manual installation, you can configure the MCP server using one of these methods: 179 | 180 | **Method 1: User Configuration (Recommended)** 181 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 
182 | 183 | **Method 2: Workspace Configuration** 184 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 185 | 186 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/mcp). 187 | 188 | ```json 189 | { 190 | "servers": { 191 | "git": { 192 | "command": "uvx", 193 | "args": ["mcp-server-git"] 194 | } 195 | } 196 | } 197 | ``` 198 | 199 | For Docker installation: 200 | 201 | ```json 202 | { 203 | "mcp": { 204 | "servers": { 205 | "git": { 206 | "command": "docker", 207 | "args": [ 208 | "run", 209 | "--rm", 210 | "-i", 211 | "--mount", "type=bind,src=${workspaceFolder},dst=/workspace", 212 | "mcp/git" 213 | ] 214 | } 215 | } 216 | } 217 | } 218 | ``` 219 | 220 | ### Usage with [Zed](https://github.com/zed-industries/zed) 221 | 222 | Add to your Zed settings.json: 223 | 224 |
225 | Using uvx 226 | 227 | ```json 228 | "context_servers": { 229 | "mcp-server-git": { 230 | "command": { 231 | "path": "uvx", 232 | "args": ["mcp-server-git"] 233 | } 234 | } 235 | }, 236 | ``` 237 |
238 | 239 |
240 | Using pip installation 241 | 242 | ```json 243 | "context_servers": { 244 | "mcp-server-git": { 245 | "command": { 246 | "path": "python", 247 | "args": ["-m", "mcp_server_git"] 248 | } 249 | } 250 | }, 251 | ``` 252 |
253 | 254 | ### Usage with [Zencoder](https://zencoder.ai) 255 | 256 | 1. Go to the Zencoder menu (...) 257 | 2. From the dropdown menu, select `Agent Tools` 258 | 3. Click on the `Add Custom MCP` 259 | 4. Add the name (i.e. git) and server configuration from below, and make sure to hit the `Install` button 260 | 261 |
262 | Using uvx 263 | 264 | ```json 265 | { 266 | "command": "uvx", 267 | "args": ["mcp-server-git", "--repository", "path/to/git/repo"] 268 | } 269 | ``` 270 |
271 | 272 | ## Debugging 273 | 274 | You can use the MCP inspector to debug the server. For uvx installations: 275 | 276 | ``` 277 | npx @modelcontextprotocol/inspector uvx mcp-server-git 278 | ``` 279 | 280 | Or if you've installed the package in a specific directory or are developing on it: 281 | 282 | ``` 283 | cd path/to/servers/src/git 284 | npx @modelcontextprotocol/inspector uv run mcp-server-git 285 | ``` 286 | 287 | Running `tail -n 20 -f ~/Library/Logs/Claude/mcp*.log` will show the logs from the server and may 288 | help you debug any issues. 289 | 290 | ## Development 291 | 292 | If you are doing local development, there are two ways to test your changes: 293 | 294 | 1. Run the MCP inspector to test your changes. See [Debugging](#debugging) for run instructions. 295 | 296 | 2. Test using the Claude desktop app. Add the following to your `claude_desktop_config.json`: 297 | 298 | ### Docker 299 | 300 | ```json 301 | { 302 | "mcpServers": { 303 | "git": { 304 | "command": "docker", 305 | "args": [ 306 | "run", 307 | "--rm", 308 | "-i", 309 | "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop", 310 | "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro", 311 | "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt", 312 | "mcp/git" 313 | ] 314 | } 315 | } 316 | } 317 | ``` 318 | 319 | ### UVX 320 | ```json 321 | { 322 | "mcpServers": { 323 | "git": { 324 | "command": "uv", 325 | "args": [ 326 | "--directory", 327 | "//mcp-servers/src/git", 328 | "run", 329 | "mcp-server-git" 330 | ] 331 | } 332 | } 333 | } 334 | ``` 335 | 336 | ## Build 337 | 338 | Docker build: 339 | 340 | ```bash 341 | cd src/git 342 | docker build -t mcp/git . 343 | ``` 344 | 345 | ## License 346 | 347 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. 
For more details, please see the LICENSE file in the project repository. 348 | -------------------------------------------------------------------------------- /src/git/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-git" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." }] 8 | maintainers = [{ name = "David Soria Parra", email = "davidsp@anthropic.com" }] 9 | keywords = ["git", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "click>=8.1.7", 20 | "gitpython>=3.1.43", 21 | "mcp>=1.0.0", 22 | "pydantic>=2.0.0", 23 | ] 24 | 25 | [project.scripts] 26 | mcp-server-git = "mcp_server_git:main" 27 | 28 | [build-system] 29 | requires = ["hatchling"] 30 | build-backend = "hatchling.build" 31 | 32 | [tool.uv] 33 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3", "pytest>=8.0.0"] 34 | 35 | [tool.pytest.ini_options] 36 | testpaths = ["tests"] 37 | python_files = "test_*.py" 38 | python_classes = "Test*" 39 | python_functions = "test_*" 40 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__init__.py: -------------------------------------------------------------------------------- 1 | import click 2 | from pathlib import Path 3 | import logging 4 | import sys 5 | from .server import serve 6 | 7 | @click.command() 8 | @click.option("--repository", "-r", type=Path, help="Git repository path") 9 | @click.option("-v", "--verbose", count=True) 10 
| def main(repository: Path | None, verbose: bool) -> None: 11 | """MCP Git Server - Git functionality for MCP""" 12 | import asyncio 13 | 14 | logging_level = logging.WARN 15 | if verbose == 1: 16 | logging_level = logging.INFO 17 | elif verbose >= 2: 18 | logging_level = logging.DEBUG 19 | 20 | logging.basicConfig(level=logging_level, stream=sys.stderr) 21 | asyncio.run(serve(repository)) 22 | 23 | if __name__ == "__main__": 24 | main() 25 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | from mcp_server_git import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/modelcontextprotocol/servers/ae0be7d7c3128f2811c10d1d9f85d1c713c5c384/src/git/src/mcp_server_git/py.typed -------------------------------------------------------------------------------- /src/git/tests/test_server.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | import git 4 | from mcp_server_git.server import git_checkout, git_branch, git_add 5 | import shutil 6 | 7 | @pytest.fixture 8 | def test_repository(tmp_path: Path): 9 | repo_path = tmp_path / "temp_test_repo" 10 | test_repo = git.Repo.init(repo_path) 11 | 12 | Path(repo_path / "test.txt").write_text("test") 13 | test_repo.index.add(["test.txt"]) 14 | test_repo.index.commit("initial commit") 15 | 16 | yield test_repo 17 | 18 | shutil.rmtree(repo_path) 19 | 20 | def test_git_checkout_existing_branch(test_repository): 21 | test_repository.git.branch("test-branch") 22 | result = git_checkout(test_repository, "test-branch") 23 | 24 | assert "Switched to branch 'test-branch'" in 
result 25 | assert test_repository.active_branch.name == "test-branch" 26 | 27 | def test_git_checkout_nonexistent_branch(test_repository): 28 | 29 | with pytest.raises(git.GitCommandError): 30 | git_checkout(test_repository, "nonexistent-branch") 31 | 32 | def test_git_branch_local(test_repository): 33 | test_repository.git.branch("new-branch-local") 34 | result = git_branch(test_repository, "local") 35 | assert "new-branch-local" in result 36 | 37 | def test_git_branch_remote(test_repository): 38 | # GitPython does not easily support creating remote branches without a remote. 39 | # This test will check the behavior when 'remote' is specified without actual remotes. 40 | result = git_branch(test_repository, "remote") 41 | assert "" == result.strip() # Should be empty if no remote branches 42 | 43 | def test_git_branch_all(test_repository): 44 | test_repository.git.branch("new-branch-all") 45 | result = git_branch(test_repository, "all") 46 | assert "new-branch-all" in result 47 | 48 | def test_git_branch_contains(test_repository): 49 | # Create a new branch and commit to it 50 | test_repository.git.checkout("-b", "feature-branch") 51 | Path(test_repository.working_dir / Path("feature.txt")).write_text("feature content") 52 | test_repository.index.add(["feature.txt"]) 53 | commit = test_repository.index.commit("feature commit") 54 | test_repository.git.checkout("master") 55 | 56 | result = git_branch(test_repository, "local", contains=commit.hexsha) 57 | assert "feature-branch" in result 58 | assert "master" not in result 59 | 60 | def test_git_branch_not_contains(test_repository): 61 | # Create a new branch and commit to it 62 | test_repository.git.checkout("-b", "another-feature-branch") 63 | Path(test_repository.working_dir / Path("another_feature.txt")).write_text("another feature content") 64 | test_repository.index.add(["another_feature.txt"]) 65 | commit = test_repository.index.commit("another feature commit") 66 | test_repository.git.checkout("master") 67 
| 68 | result = git_branch(test_repository, "local", not_contains=commit.hexsha) 69 | assert "another-feature-branch" not in result 70 | assert "master" in result 71 | 72 | def test_git_add_all_files(test_repository): 73 | file_path = Path(test_repository.working_dir) / "all_file.txt" 74 | file_path.write_text("adding all") 75 | 76 | result = git_add(test_repository, ["."]) 77 | 78 | staged_files = [item.a_path for item in test_repository.index.diff("HEAD")] 79 | assert "all_file.txt" in staged_files 80 | assert result == "Files staged successfully" 81 | 82 | def test_git_add_specific_files(test_repository): 83 | file1 = Path(test_repository.working_dir) / "file1.txt" 84 | file2 = Path(test_repository.working_dir) / "file2.txt" 85 | file1.write_text("file 1 content") 86 | file2.write_text("file 2 content") 87 | 88 | result = git_add(test_repository, ["file1.txt"]) 89 | 90 | staged_files = [item.a_path for item in test_repository.index.diff("HEAD")] 91 | assert "file1.txt" in staged_files 92 | assert "file2.txt" not in staged_files 93 | assert result == "Files staged successfully" 94 | -------------------------------------------------------------------------------- /src/memory/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/memory /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] 
-------------------------------------------------------------------------------- /src/memory/README.md: -------------------------------------------------------------------------------- 1 | # Knowledge Graph Memory Server 2 | 3 | A basic implementation of persistent memory using a local knowledge graph. This lets Claude remember information about the user across chats. 4 | 5 | ## Core Concepts 6 | 7 | ### Entities 8 | Entities are the primary nodes in the knowledge graph. Each entity has: 9 | - A unique name (identifier) 10 | - An entity type (e.g., "person", "organization", "event") 11 | - A list of observations 12 | 13 | Example: 14 | ```json 15 | { 16 | "name": "John_Smith", 17 | "entityType": "person", 18 | "observations": ["Speaks fluent Spanish"] 19 | } 20 | ``` 21 | 22 | ### Relations 23 | Relations define directed connections between entities. They are always stored in active voice and describe how entities interact or relate to each other. 24 | 25 | Example: 26 | ```json 27 | { 28 | "from": "John_Smith", 29 | "to": "Anthropic", 30 | "relationType": "works_at" 31 | } 32 | ``` 33 | ### Observations 34 | Observations are discrete pieces of information about an entity. 
They are: 35 | 36 | - Stored as strings 37 | - Attached to specific entities 38 | - Can be added or removed independently 39 | - Should be atomic (one fact per observation) 40 | 41 | Example: 42 | ```json 43 | { 44 | "entityName": "John_Smith", 45 | "observations": [ 46 | "Speaks fluent Spanish", 47 | "Graduated in 2019", 48 | "Prefers morning meetings" 49 | ] 50 | } 51 | ``` 52 | 53 | ## API 54 | 55 | ### Tools 56 | - **create_entities** 57 | - Create multiple new entities in the knowledge graph 58 | - Input: `entities` (array of objects) 59 | - Each object contains: 60 | - `name` (string): Entity identifier 61 | - `entityType` (string): Type classification 62 | - `observations` (string[]): Associated observations 63 | - Ignores entities with existing names 64 | 65 | - **create_relations** 66 | - Create multiple new relations between entities 67 | - Input: `relations` (array of objects) 68 | - Each object contains: 69 | - `from` (string): Source entity name 70 | - `to` (string): Target entity name 71 | - `relationType` (string): Relationship type in active voice 72 | - Skips duplicate relations 73 | 74 | - **add_observations** 75 | - Add new observations to existing entities 76 | - Input: `observations` (array of objects) 77 | - Each object contains: 78 | - `entityName` (string): Target entity 79 | - `contents` (string[]): New observations to add 80 | - Returns added observations per entity 81 | - Fails if entity doesn't exist 82 | 83 | - **delete_entities** 84 | - Remove entities and their relations 85 | - Input: `entityNames` (string[]) 86 | - Cascading deletion of associated relations 87 | - Silent operation if entity doesn't exist 88 | 89 | - **delete_observations** 90 | - Remove specific observations from entities 91 | - Input: `deletions` (array of objects) 92 | - Each object contains: 93 | - `entityName` (string): Target entity 94 | - `observations` (string[]): Observations to remove 95 | - Silent operation if observation doesn't exist 96 | 97 | - 
**delete_relations** 98 | - Remove specific relations from the graph 99 | - Input: `relations` (array of objects) 100 | - Each object contains: 101 | - `from` (string): Source entity name 102 | - `to` (string): Target entity name 103 | - `relationType` (string): Relationship type 104 | - Silent operation if relation doesn't exist 105 | 106 | - **read_graph** 107 | - Read the entire knowledge graph 108 | - No input required 109 | - Returns complete graph structure with all entities and relations 110 | 111 | - **search_nodes** 112 | - Search for nodes based on query 113 | - Input: `query` (string) 114 | - Searches across: 115 | - Entity names 116 | - Entity types 117 | - Observation content 118 | - Returns matching entities and their relations 119 | 120 | - **open_nodes** 121 | - Retrieve specific nodes by name 122 | - Input: `names` (string[]) 123 | - Returns: 124 | - Requested entities 125 | - Relations between requested entities 126 | - Silently skips non-existent nodes 127 | 128 | # Usage with Claude Desktop 129 | 130 | ### Setup 131 | 132 | Add this to your claude_desktop_config.json: 133 | 134 | #### Docker 135 | 136 | ```json 137 | { 138 | "mcpServers": { 139 | "memory": { 140 | "command": "docker", 141 | "args": ["run", "-i", "-v", "claude-memory:/app/dist", "--rm", "mcp/memory"] 142 | } 143 | } 144 | } 145 | ``` 146 | 147 | #### NPX 148 | ```json 149 | { 150 | "mcpServers": { 151 | "memory": { 152 | "command": "npx", 153 | "args": [ 154 | "-y", 155 | "@modelcontextprotocol/server-memory" 156 | ] 157 | } 158 | } 159 | } 160 | ``` 161 | 162 | #### NPX with custom setting 163 | 164 | The server can be configured using the following environment variables: 165 | 166 | ```json 167 | { 168 | "mcpServers": { 169 | "memory": { 170 | "command": "npx", 171 | "args": [ 172 | "-y", 173 | "@modelcontextprotocol/server-memory" 174 | ], 175 | "env": { 176 | "MEMORY_FILE_PATH": "/path/to/custom/memory.json" 177 | } 178 | } 179 | } 180 | } 181 | ``` 182 | 183 | - 
`MEMORY_FILE_PATH`: Path to the memory storage JSON file (default: `memory.json` in the server directory) 184 | 185 | # VS Code Installation Instructions 186 | 187 | For quick installation, use one of the one-click installation buttons below: 188 | 189 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D&quality=insiders) 190 | 191 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D&quality=insiders) 192 | 193 | For manual installation, you can configure the MCP server using one of these methods: 194 | 195 | **Method 1: User Configuration (Recommended)** 196 | Add the configuration to your user-level MCP configuration file. 
Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 197 | 198 | **Method 2: Workspace Configuration** 199 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 200 | 201 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/mcp). 202 | 203 | #### NPX 204 | 205 | ```json 206 | { 207 | "servers": { 208 | "memory": { 209 | "command": "npx", 210 | "args": [ 211 | "-y", 212 | "@modelcontextprotocol/server-memory" 213 | ] 214 | } 215 | } 216 | } 217 | ``` 218 | 219 | #### Docker 220 | 221 | ```json 222 | { 223 | "servers": { 224 | "memory": { 225 | "command": "docker", 226 | "args": [ 227 | "run", 228 | "-i", 229 | "-v", 230 | "claude-memory:/app/dist", 231 | "--rm", 232 | "mcp/memory" 233 | ] 234 | } 235 | } 236 | } 237 | ``` 238 | 239 | ### System Prompt 240 | 241 | The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created. 242 | 243 | Here is an example prompt for chat personalization. You could use this prompt in the "Custom Instructions" field of a [Claude.ai Project](https://www.anthropic.com/news/projects). 244 | 245 | ``` 246 | Follow these steps for each interaction: 247 | 248 | 1. User Identification: 249 | - You should assume that you are interacting with default_user 250 | - If you have not identified default_user, proactively try to do so. 251 | 252 | 2. Memory Retrieval: 253 | - Always begin your chat by saying only "Remembering..." and retrieve all relevant information from your knowledge graph 254 | - Always refer to your knowledge graph as your "memory" 255 | 256 | 3. 
Memory 257 | - While conversing with the user, be attentive to any new information that falls into these categories: 258 | a) Basic Identity (age, gender, location, job title, education level, etc.) 259 | b) Behaviors (interests, habits, etc.) 260 | c) Preferences (communication style, preferred language, etc.) 261 | d) Goals (goals, targets, aspirations, etc.) 262 | e) Relationships (personal and professional relationships up to 3 degrees of separation) 263 | 264 | 4. Memory Update: 265 | - If any new information was gathered during the interaction, update your memory as follows: 266 | a) Create entities for recurring organizations, people, and significant events 267 | b) Connect them to the current entities using relations 268 | c) Store facts about them as observations 269 | ``` 270 | 271 | ## Building 272 | 273 | Docker: 274 | 275 | ```sh 276 | docker build -t mcp/memory -f src/memory/Dockerfile . 277 | ``` 278 | 279 | For Awareness: a prior mcp/memory volume contains an index.js file that could be overwritten by the new container. If you are using a docker volume for storage, delete the old docker volume's `index.js` file before starting the new container. 280 | 281 | ## License 282 | 283 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
284 | -------------------------------------------------------------------------------- /src/memory/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-memory", 3 | "version": "0.6.3", 4 | "description": "MCP server for enabling memory for Claude through a knowledge graph", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-memory": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1" 23 | }, 24 | "devDependencies": { 25 | "@types/node": "^22", 26 | "shx": "^0.3.4", 27 | "typescript": "^5.6.2" 28 | } 29 | } -------------------------------------------------------------------------------- /src/memory/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/sequentialthinking/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/sequentialthinking /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] 25 | -------------------------------------------------------------------------------- /src/sequentialthinking/README.md: -------------------------------------------------------------------------------- 1 | # Sequential Thinking MCP Server 2 | 3 | An MCP server implementation that provides a tool for dynamic and reflective problem-solving through a structured thinking process. 4 | 5 | ## Features 6 | 7 | - Break down complex problems into manageable steps 8 | - Revise and refine thoughts as understanding deepens 9 | - Branch into alternative paths of reasoning 10 | - Adjust the total number of thoughts dynamically 11 | - Generate and verify solution hypotheses 12 | 13 | ## Tool 14 | 15 | ### sequential_thinking 16 | 17 | Facilitates a detailed, step-by-step thinking process for problem-solving and analysis. 
18 | 19 | **Inputs:** 20 | - `thought` (string): The current thinking step 21 | - `nextThoughtNeeded` (boolean): Whether another thought step is needed 22 | - `thoughtNumber` (integer): Current thought number 23 | - `totalThoughts` (integer): Estimated total thoughts needed 24 | - `isRevision` (boolean, optional): Whether this revises previous thinking 25 | - `revisesThought` (integer, optional): Which thought is being reconsidered 26 | - `branchFromThought` (integer, optional): Branching point thought number 27 | - `branchId` (string, optional): Branch identifier 28 | - `needsMoreThoughts` (boolean, optional): If more thoughts are needed 29 | 30 | ## Usage 31 | 32 | The Sequential Thinking tool is designed for: 33 | - Breaking down complex problems into steps 34 | - Planning and design with room for revision 35 | - Analysis that might need course correction 36 | - Problems where the full scope might not be clear initially 37 | - Tasks that need to maintain context over multiple steps 38 | - Situations where irrelevant information needs to be filtered out 39 | 40 | ## Configuration 41 | 42 | ### Usage with Claude Desktop 43 | 44 | Add this to your `claude_desktop_config.json`: 45 | 46 | #### npx 47 | 48 | ```json 49 | { 50 | "mcpServers": { 51 | "sequential-thinking": { 52 | "command": "npx", 53 | "args": [ 54 | "-y", 55 | "@modelcontextprotocol/server-sequential-thinking" 56 | ] 57 | } 58 | } 59 | } 60 | ``` 61 | 62 | #### docker 63 | 64 | ```json 65 | { 66 | "mcpServers": { 67 | "sequentialthinking": { 68 | "command": "docker", 69 | "args": [ 70 | "run", 71 | "--rm", 72 | "-i", 73 | "mcp/sequentialthinking" 74 | ] 75 | } 76 | } 77 | } 78 | ``` 79 | 80 | To disable logging of thought information set env var: `DISABLE_THOUGHT_LOGGING` to `true`. 81 | Comment 82 | 83 | ### Usage with VS Code 84 | 85 | For quick installation, click one of the installation buttons below... 
86 | 87 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D&quality=insiders) 88 | 89 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D&quality=insiders) 90 | 91 | For manual installation, you can configure the MCP server using one of these methods: 92 | 93 | **Method 1: User Configuration (Recommended)** 94 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 
95 | 96 | **Method 2: Workspace Configuration** 97 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 98 | 99 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/mcp). 100 | 101 | For NPX installation: 102 | 103 | ```json 104 | { 105 | "servers": { 106 | "sequential-thinking": { 107 | "command": "npx", 108 | "args": [ 109 | "-y", 110 | "@modelcontextprotocol/server-sequential-thinking" 111 | ] 112 | } 113 | } 114 | } 115 | ``` 116 | 117 | For Docker installation: 118 | 119 | ```json 120 | { 121 | "servers": { 122 | "sequential-thinking": { 123 | "command": "docker", 124 | "args": [ 125 | "run", 126 | "--rm", 127 | "-i", 128 | "mcp/sequentialthinking" 129 | ] 130 | } 131 | } 132 | } 133 | ``` 134 | 135 | ## Building 136 | 137 | Docker: 138 | 139 | ```bash 140 | docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile . 141 | ``` 142 | 143 | ## License 144 | 145 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
146 | -------------------------------------------------------------------------------- /src/sequentialthinking/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 5 | import { 6 | CallToolRequestSchema, 7 | ListToolsRequestSchema, 8 | Tool, 9 | } from "@modelcontextprotocol/sdk/types.js"; 10 | // Fixed chalk import for ESM 11 | import chalk from 'chalk'; 12 | 13 | interface ThoughtData { 14 | thought: string; 15 | thoughtNumber: number; 16 | totalThoughts: number; 17 | isRevision?: boolean; 18 | revisesThought?: number; 19 | branchFromThought?: number; 20 | branchId?: string; 21 | needsMoreThoughts?: boolean; 22 | nextThoughtNeeded: boolean; 23 | } 24 | 25 | class SequentialThinkingServer { 26 | private thoughtHistory: ThoughtData[] = []; 27 | private branches: Record = {}; 28 | private disableThoughtLogging: boolean; 29 | 30 | constructor() { 31 | this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true"; 32 | } 33 | 34 | private validateThoughtData(input: unknown): ThoughtData { 35 | const data = input as Record; 36 | 37 | if (!data.thought || typeof data.thought !== 'string') { 38 | throw new Error('Invalid thought: must be a string'); 39 | } 40 | if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') { 41 | throw new Error('Invalid thoughtNumber: must be a number'); 42 | } 43 | if (!data.totalThoughts || typeof data.totalThoughts !== 'number') { 44 | throw new Error('Invalid totalThoughts: must be a number'); 45 | } 46 | if (typeof data.nextThoughtNeeded !== 'boolean') { 47 | throw new Error('Invalid nextThoughtNeeded: must be a boolean'); 48 | } 49 | 50 | return { 51 | thought: data.thought, 52 | thoughtNumber: data.thoughtNumber, 53 | totalThoughts: data.totalThoughts, 54 | 
nextThoughtNeeded: data.nextThoughtNeeded, 55 | isRevision: data.isRevision as boolean | undefined, 56 | revisesThought: data.revisesThought as number | undefined, 57 | branchFromThought: data.branchFromThought as number | undefined, 58 | branchId: data.branchId as string | undefined, 59 | needsMoreThoughts: data.needsMoreThoughts as boolean | undefined, 60 | }; 61 | } 62 | 63 | private formatThought(thoughtData: ThoughtData): string { 64 | const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } = thoughtData; 65 | 66 | let prefix = ''; 67 | let context = ''; 68 | 69 | if (isRevision) { 70 | prefix = chalk.yellow('🔄 Revision'); 71 | context = ` (revising thought ${revisesThought})`; 72 | } else if (branchFromThought) { 73 | prefix = chalk.green('🌿 Branch'); 74 | context = ` (from thought ${branchFromThought}, ID: ${branchId})`; 75 | } else { 76 | prefix = chalk.blue('💭 Thought'); 77 | context = ''; 78 | } 79 | 80 | const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`; 81 | const border = '─'.repeat(Math.max(header.length, thought.length) + 4); 82 | 83 | return ` 84 | ┌${border}┐ 85 | │ ${header} │ 86 | ├${border}┤ 87 | │ ${thought.padEnd(border.length - 2)} │ 88 | └${border}┘`; 89 | } 90 | 91 | public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } { 92 | try { 93 | const validatedInput = this.validateThoughtData(input); 94 | 95 | if (validatedInput.thoughtNumber > validatedInput.totalThoughts) { 96 | validatedInput.totalThoughts = validatedInput.thoughtNumber; 97 | } 98 | 99 | this.thoughtHistory.push(validatedInput); 100 | 101 | if (validatedInput.branchFromThought && validatedInput.branchId) { 102 | if (!this.branches[validatedInput.branchId]) { 103 | this.branches[validatedInput.branchId] = []; 104 | } 105 | this.branches[validatedInput.branchId].push(validatedInput); 106 | } 107 | 108 | if (!this.disableThoughtLogging) { 109 | const
formattedThought = this.formatThought(validatedInput); 110 | console.error(formattedThought); 111 | } 112 | 113 | return { 114 | content: [{ 115 | type: "text", 116 | text: JSON.stringify({ 117 | thoughtNumber: validatedInput.thoughtNumber, 118 | totalThoughts: validatedInput.totalThoughts, 119 | nextThoughtNeeded: validatedInput.nextThoughtNeeded, 120 | branches: Object.keys(this.branches), 121 | thoughtHistoryLength: this.thoughtHistory.length 122 | }, null, 2) 123 | }] 124 | }; 125 | } catch (error) { 126 | return { 127 | content: [{ 128 | type: "text", 129 | text: JSON.stringify({ 130 | error: error instanceof Error ? error.message : String(error), 131 | status: 'failed' 132 | }, null, 2) 133 | }], 134 | isError: true 135 | }; 136 | } 137 | } 138 | } 139 | 140 | const SEQUENTIAL_THINKING_TOOL: Tool = { 141 | name: "sequentialthinking", 142 | description: `A detailed tool for dynamic and reflective problem-solving through thoughts. 143 | This tool helps analyze problems through a flexible thinking process that can adapt and evolve. 144 | Each thought can build on, question, or revise previous insights as understanding deepens. 
145 | 146 | When to use this tool: 147 | - Breaking down complex problems into steps 148 | - Planning and design with room for revision 149 | - Analysis that might need course correction 150 | - Problems where the full scope might not be clear initially 151 | - Problems that require a multi-step solution 152 | - Tasks that need to maintain context over multiple steps 153 | - Situations where irrelevant information needs to be filtered out 154 | 155 | Key features: 156 | - You can adjust total_thoughts up or down as you progress 157 | - You can question or revise previous thoughts 158 | - You can add more thoughts even after reaching what seemed like the end 159 | - You can express uncertainty and explore alternative approaches 160 | - Not every thought needs to build linearly - you can branch or backtrack 161 | - Generates a solution hypothesis 162 | - Verifies the hypothesis based on the Chain of Thought steps 163 | - Repeats the process until satisfied 164 | - Provides a correct answer 165 | 166 | Parameters explained: 167 | - thought: Your current thinking step, which can include: 168 | * Regular analytical steps 169 | * Revisions of previous thoughts 170 | * Questions about previous decisions 171 | * Realizations about needing more analysis 172 | * Changes in approach 173 | * Hypothesis generation 174 | * Hypothesis verification 175 | - next_thought_needed: True if you need more thinking, even if at what seemed like the end 176 | - thought_number: Current number in sequence (can go beyond initial total if needed) 177 | - total_thoughts: Current estimate of thoughts needed (can be adjusted up/down) 178 | - is_revision: A boolean indicating if this thought revises previous thinking 179 | - revises_thought: If is_revision is true, which thought number is being reconsidered 180 | - branch_from_thought: If branching, which thought number is the branching point 181 | - branch_id: Identifier for the current branch (if any) 182 | - needs_more_thoughts: If reaching end 
but realizing more thoughts needed 183 | 184 | You should: 185 | 1. Start with an initial estimate of needed thoughts, but be ready to adjust 186 | 2. Feel free to question or revise previous thoughts 187 | 3. Don't hesitate to add more thoughts if needed, even at the "end" 188 | 4. Express uncertainty when present 189 | 5. Mark thoughts that revise previous thinking or branch into new paths 190 | 6. Ignore information that is irrelevant to the current step 191 | 7. Generate a solution hypothesis when appropriate 192 | 8. Verify the hypothesis based on the Chain of Thought steps 193 | 9. Repeat the process until satisfied with the solution 194 | 10. Provide a single, ideally correct answer as the final output 195 | 11. Only set next_thought_needed to false when truly done and a satisfactory answer is reached`, 196 | inputSchema: { 197 | type: "object", 198 | properties: { 199 | thought: { 200 | type: "string", 201 | description: "Your current thinking step" 202 | }, 203 | nextThoughtNeeded: { 204 | type: "boolean", 205 | description: "Whether another thought step is needed" 206 | }, 207 | thoughtNumber: { 208 | type: "integer", 209 | description: "Current thought number (numeric value, e.g., 1, 2, 3)", 210 | minimum: 1 211 | }, 212 | totalThoughts: { 213 | type: "integer", 214 | description: "Estimated total thoughts needed (numeric value, e.g., 5, 10)", 215 | minimum: 1 216 | }, 217 | isRevision: { 218 | type: "boolean", 219 | description: "Whether this revises previous thinking" 220 | }, 221 | revisesThought: { 222 | type: "integer", 223 | description: "Which thought is being reconsidered", 224 | minimum: 1 225 | }, 226 | branchFromThought: { 227 | type: "integer", 228 | description: "Branching point thought number", 229 | minimum: 1 230 | }, 231 | branchId: { 232 | type: "string", 233 | description: "Branch identifier" 234 | }, 235 | needsMoreThoughts: { 236 | type: "boolean", 237 | description: "If more thoughts are needed" 238 | } 239 | }, 240 | required: 
["thought", "nextThoughtNeeded", "thoughtNumber", "totalThoughts"] 241 | } 242 | }; 243 | 244 | const server = new Server( 245 | { 246 | name: "sequential-thinking-server", 247 | version: "0.2.0", 248 | }, 249 | { 250 | capabilities: { 251 | tools: {}, 252 | }, 253 | } 254 | ); 255 | 256 | const thinkingServer = new SequentialThinkingServer(); 257 | 258 | server.setRequestHandler(ListToolsRequestSchema, async () => ({ 259 | tools: [SEQUENTIAL_THINKING_TOOL], 260 | })); 261 | 262 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 263 | if (request.params.name === "sequentialthinking") { 264 | return thinkingServer.processThought(request.params.arguments); 265 | } 266 | 267 | return { 268 | content: [{ 269 | type: "text", 270 | text: `Unknown tool: ${request.params.name}` 271 | }], 272 | isError: true 273 | }; 274 | }); 275 | 276 | async function runServer() { 277 | const transport = new StdioServerTransport(); 278 | await server.connect(transport); 279 | console.error("Sequential Thinking MCP Server running on stdio"); 280 | } 281 | 282 | runServer().catch((error) => { 283 | console.error("Fatal error running server:", error); 284 | process.exit(1); 285 | }); 286 | -------------------------------------------------------------------------------- /src/sequentialthinking/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-sequential-thinking", 3 | "version": "0.6.2", 4 | "description": "MCP server for sequential thinking and problem solving", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-sequential-thinking": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run 
build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "0.5.0", 23 | "chalk": "^5.3.0", 24 | "yargs": "^17.7.2" 25 | }, 26 | "devDependencies": { 27 | "@types/node": "^22", 28 | "@types/yargs": "^17.0.32", 29 | "shx": "^0.3.4", 30 | "typescript": "^5.3.3" 31 | } 32 | } -------------------------------------------------------------------------------- /src/sequentialthinking/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "moduleResolution": "NodeNext", 7 | "module": "NodeNext" 8 | }, 9 | "include": ["./**/*.ts"] 10 | } 11 | -------------------------------------------------------------------------------- /src/time/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/time/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # Set the LOCAL_TIMEZONE environment variable 36 | ENV LOCAL_TIMEZONE=${LOCAL_TIMEZONE:-"UTC"} 37 | 38 | # Exec-form ENTRYPOINT does not expand env vars, so go through `sh -c` to resolve LOCAL_TIMEZONE at container start 39 | ENTRYPOINT ["sh", "-c", "mcp-server-time --local-timezone=$LOCAL_TIMEZONE"] 40 | -------------------------------------------------------------------------------- /src/time/README.md: -------------------------------------------------------------------------------- 1 | # Time MCP Server 2 | 3 | A Model Context Protocol server that provides time and timezone conversion capabilities. This server enables LLMs to get current time information and perform timezone conversions using IANA timezone names, with automatic system timezone detection. 4 | 5 | ### Available Tools 6 | 7 | - `get_current_time` - Get current time in a specific timezone or system timezone. 8 | - Required arguments: 9 | - `timezone` (string): IANA timezone name (e.g., 'America/New_York', 'Europe/London') 10 | 11 | - `convert_time` - Convert time between timezones. 12 | - Required arguments: 13 | - `source_timezone` (string): Source IANA timezone name 14 | - `time` (string): Time in 24-hour format (HH:MM) 15 | - `target_timezone` (string): Target IANA timezone name 16 | 17 | ## Installation 18 | 19 | ### Using uv (recommended) 20 | 21 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 22 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-time*.
23 | 24 | ### Using PIP 25 | 26 | Alternatively you can install `mcp-server-time` via pip: 27 | 28 | ```bash 29 | pip install mcp-server-time 30 | ``` 31 | 32 | After installation, you can run it as a script using: 33 | 34 | ```bash 35 | python -m mcp_server_time 36 | ``` 37 | 38 | ## Configuration 39 | 40 | ### Configure for Claude.app 41 | 42 | Add to your Claude settings: 43 | 44 |
45 | Using uvx 46 | 47 | ```json 48 | { 49 | "mcpServers": { 50 | "time": { 51 | "command": "uvx", 52 | "args": ["mcp-server-time"] 53 | } 54 | } 55 | } 56 | ``` 57 |
58 | 59 |
60 | Using docker 61 | 62 | ```json 63 | { 64 | "mcpServers": { 65 | "time": { 66 | "command": "docker", 67 | "args": ["run", "-i", "--rm", "-e", "LOCAL_TIMEZONE", "mcp/time"] 68 | } 69 | } 70 | } 71 | ``` 72 |
73 | 74 |
75 | Using pip installation 76 | 77 | ```json 78 | { 79 | "mcpServers": { 80 | "time": { 81 | "command": "python", 82 | "args": ["-m", "mcp_server_time"] 83 | } 84 | } 85 | } 86 | ``` 87 |
88 | 89 | ### Configure for Zed 90 | 91 | Add to your Zed settings.json: 92 | 93 |
94 | Using uvx 95 | 96 | ```json 97 | "context_servers": [ 98 | "mcp-server-time": { 99 | "command": "uvx", 100 | "args": ["mcp-server-time"] 101 | } 102 | ], 103 | ``` 104 |
105 | 106 |
107 | Using pip installation 108 | 109 | ```json 110 | "context_servers": { 111 | "mcp-server-time": { 112 | "command": "python", 113 | "args": ["-m", "mcp_server_time"] 114 | } 115 | }, 116 | ``` 117 |
118 | 119 | ### Configure for VS Code 120 | 121 | For quick installation, use one of the one-click install buttons below... 122 | 123 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D&quality=insiders) 124 | 125 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D&quality=insiders) 126 | 127 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 128 | 129 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 130 | 131 | > Note that the `mcp` key is needed when using the `mcp.json` file. 132 | 133 |
134 | Using uvx 135 | 136 | ```json 137 | { 138 | "mcp": { 139 | "servers": { 140 | "time": { 141 | "command": "uvx", 142 | "args": ["mcp-server-time"] 143 | } 144 | } 145 | } 146 | } 147 | ``` 148 |
149 | 150 |
151 | Using Docker 152 | 153 | ```json 154 | { 155 | "mcp": { 156 | "servers": { 157 | "time": { 158 | "command": "docker", 159 | "args": ["run", "-i", "--rm", "mcp/time"] 160 | } 161 | } 162 | } 163 | } 164 | ``` 165 |
166 | 167 | ### Configure for Zencoder 168 | 169 | 1. Go to the Zencoder menu (...) 170 | 2. From the dropdown menu, select `Agent Tools` 171 | 3. Click on the `Add Custom MCP` 172 | 4. Add the name and server configuration from below, and make sure to hit the `Install` button 173 | 174 |
175 | Using uvx 176 | 177 | ```json 178 | { 179 | "command": "uvx", 180 | "args": ["mcp-server-time"] 181 | } 182 | ``` 183 |
184 | 185 | ### Customization - System Timezone 186 | 187 | By default, the server automatically detects your system's timezone. You can override this by adding the argument `--local-timezone` to the `args` list in the configuration. 188 | 189 | Example: 190 | ```json 191 | { 192 | "command": "python", 193 | "args": ["-m", "mcp_server_time", "--local-timezone=America/New_York"] 194 | } 195 | ``` 196 | 197 | ## Example Interactions 198 | 199 | 1. Get current time: 200 | ```json 201 | { 202 | "name": "get_current_time", 203 | "arguments": { 204 | "timezone": "Europe/Warsaw" 205 | } 206 | } 207 | ``` 208 | Response: 209 | ```json 210 | { 211 | "timezone": "Europe/Warsaw", 212 | "datetime": "2024-01-01T13:00:00+01:00", 213 | "is_dst": false 214 | } 215 | ``` 216 | 217 | 2. Convert time between timezones: 218 | ```json 219 | { 220 | "name": "convert_time", 221 | "arguments": { 222 | "source_timezone": "America/New_York", 223 | "time": "16:30", 224 | "target_timezone": "Asia/Tokyo" 225 | } 226 | } 227 | ``` 228 | Response: 229 | ```json 230 | { 231 | "source": { 232 | "timezone": "America/New_York", 233 | "datetime": "2024-01-01T12:30:00-05:00", 234 | "is_dst": false 235 | }, 236 | "target": { 237 | "timezone": "Asia/Tokyo", 238 | "datetime": "2024-01-01T12:30:00+09:00", 239 | "is_dst": false 240 | }, 241 | "time_difference": "+13.0h", 242 | } 243 | ``` 244 | 245 | ## Debugging 246 | 247 | You can use the MCP inspector to debug the server. For uvx installations: 248 | 249 | ```bash 250 | npx @modelcontextprotocol/inspector uvx mcp-server-time 251 | ``` 252 | 253 | Or if you've installed the package in a specific directory or are developing on it: 254 | 255 | ```bash 256 | cd path/to/servers/src/time 257 | npx @modelcontextprotocol/inspector uv run mcp-server-time 258 | ``` 259 | 260 | ## Examples of Questions for Claude 261 | 262 | 1. "What time is it now?" (will use system timezone) 263 | 2. "What time is it in Tokyo?" 264 | 3. 
"When it's 4 PM in New York, what time is it in London?" 265 | 4. "Convert 9:30 AM Tokyo time to New York time" 266 | 267 | ## Build 268 | 269 | Docker build: 270 | 271 | ```bash 272 | cd src/time 273 | docker build -t mcp/time . 274 | ``` 275 | 276 | ## Contributing 277 | 278 | We encourage contributions to help expand and improve mcp-server-time. Whether you want to add new time-related tools, enhance existing functionality, or improve documentation, your input is valuable. 279 | 280 | For examples of other MCP servers and implementation patterns, see: 281 | https://github.com/modelcontextprotocol/servers 282 | 283 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-time even more powerful and useful. 284 | 285 | ## License 286 | 287 | mcp-server-time is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
288 | -------------------------------------------------------------------------------- /src/time/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-time" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools for time queries and timezone conversions for LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [ 8 | { name = "Mariusz 'maledorak' Korzekwa", email = "mariusz@korzekwa.dev" }, 9 | ] 10 | keywords = ["time", "timezone", "mcp", "llm"] 11 | license = { text = "MIT" } 12 | classifiers = [ 13 | "Development Status :: 4 - Beta", 14 | "Intended Audience :: Developers", 15 | "License :: OSI Approved :: MIT License", 16 | "Programming Language :: Python :: 3", 17 | "Programming Language :: Python :: 3.10", 18 | ] 19 | dependencies = [ 20 | "mcp>=1.0.0", 21 | "pydantic>=2.0.0", 22 | "tzdata>=2024.2", 23 | "tzlocal>=5.3.1" 24 | ] 25 | 26 | [project.scripts] 27 | mcp-server-time = "mcp_server_time:main" 28 | 29 | [build-system] 30 | requires = ["hatchling"] 31 | build-backend = "hatchling.build" 32 | 33 | [tool.uv] 34 | dev-dependencies = [ 35 | "freezegun>=1.5.1", 36 | "pyright>=1.1.389", 37 | "pytest>=8.3.3", 38 | "ruff>=0.8.1", 39 | ] 40 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Time Server - Time and timezone conversion functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | parser = argparse.ArgumentParser( 10 | description="give a model the ability to handle time queries and timezone conversions" 11 | ) 12 | parser.add_argument("--local-timezone", type=str, help="Override local timezone") 13 | 14 | args = parser.parse_args() 15 | asyncio.run(serve(args.local_timezone)) 16 | 17 | 18 | 
if __name__ == "__main__": 19 | main() 20 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__main__.py: -------------------------------------------------------------------------------- 1 | from mcp_server_time import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/server.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from enum import Enum 3 | import json 4 | from typing import Sequence 5 | 6 | from zoneinfo import ZoneInfo 7 | from tzlocal import get_localzone_name # ← returns "Europe/Paris", etc. 8 | 9 | from mcp.server import Server 10 | from mcp.server.stdio import stdio_server 11 | from mcp.types import Tool, TextContent, ImageContent, EmbeddedResource 12 | from mcp.shared.exceptions import McpError 13 | 14 | from pydantic import BaseModel 15 | 16 | 17 | class TimeTools(str, Enum): 18 | GET_CURRENT_TIME = "get_current_time" 19 | CONVERT_TIME = "convert_time" 20 | 21 | 22 | class TimeResult(BaseModel): 23 | timezone: str 24 | datetime: str 25 | day_of_week: str 26 | is_dst: bool 27 | 28 | 29 | class TimeConversionResult(BaseModel): 30 | source: TimeResult 31 | target: TimeResult 32 | time_difference: str 33 | 34 | 35 | class TimeConversionInput(BaseModel): 36 | source_tz: str 37 | time: str 38 | target_tz_list: list[str] 39 | 40 | 41 | def get_local_tz(local_tz_override: str | None = None) -> ZoneInfo: 42 | if local_tz_override: 43 | return ZoneInfo(local_tz_override) 44 | 45 | # Get local timezone from datetime.now() 46 | local_tzname = get_localzone_name() 47 | if local_tzname is not None: 48 | return ZoneInfo(local_tzname) 49 | raise McpError("Could not determine local timezone - tzinfo is None") 50 | 51 | 52 | def get_zoneinfo(timezone_name: str) -> ZoneInfo: 53 | try: 54 | return ZoneInfo(timezone_name) 55 | except 
Exception as e: 56 | raise McpError(f"Invalid timezone: {str(e)}") 57 | 58 | 59 | class TimeServer: 60 | def get_current_time(self, timezone_name: str) -> TimeResult: 61 | """Get current time in specified timezone""" 62 | timezone = get_zoneinfo(timezone_name) 63 | current_time = datetime.now(timezone) 64 | 65 | return TimeResult( 66 | timezone=timezone_name, 67 | datetime=current_time.isoformat(timespec="seconds"), 68 | day_of_week=current_time.strftime("%A"), 69 | is_dst=bool(current_time.dst()), 70 | ) 71 | 72 | def convert_time( 73 | self, source_tz: str, time_str: str, target_tz: str 74 | ) -> TimeConversionResult: 75 | """Convert time between timezones""" 76 | source_timezone = get_zoneinfo(source_tz) 77 | target_timezone = get_zoneinfo(target_tz) 78 | 79 | try: 80 | parsed_time = datetime.strptime(time_str, "%H:%M").time() 81 | except ValueError: 82 | raise ValueError("Invalid time format. Expected HH:MM [24-hour format]") 83 | 84 | now = datetime.now(source_timezone) 85 | source_time = datetime( 86 | now.year, 87 | now.month, 88 | now.day, 89 | parsed_time.hour, 90 | parsed_time.minute, 91 | tzinfo=source_timezone, 92 | ) 93 | 94 | target_time = source_time.astimezone(target_timezone) 95 | source_offset = source_time.utcoffset() or timedelta() 96 | target_offset = target_time.utcoffset() or timedelta() 97 | hours_difference = (target_offset - source_offset).total_seconds() / 3600 98 | 99 | if hours_difference.is_integer(): 100 | time_diff_str = f"{hours_difference:+.1f}h" 101 | else: 102 | # For fractional hours like Nepal's UTC+5:45 103 | time_diff_str = f"{hours_difference:+.2f}".rstrip("0").rstrip(".") + "h" 104 | 105 | return TimeConversionResult( 106 | source=TimeResult( 107 | timezone=source_tz, 108 | datetime=source_time.isoformat(timespec="seconds"), 109 | day_of_week=source_time.strftime("%A"), 110 | is_dst=bool(source_time.dst()), 111 | ), 112 | target=TimeResult( 113 | timezone=target_tz, 114 | datetime=target_time.isoformat(timespec="seconds"), 
115 | day_of_week=target_time.strftime("%A"), 116 | is_dst=bool(target_time.dst()), 117 | ), 118 | time_difference=time_diff_str, 119 | ) 120 | 121 | 122 | async def serve(local_timezone: str | None = None) -> None: 123 | server = Server("mcp-time") 124 | time_server = TimeServer() 125 | local_tz = str(get_local_tz(local_timezone)) 126 | 127 | @server.list_tools() 128 | async def list_tools() -> list[Tool]: 129 | """List available time tools.""" 130 | return [ 131 | Tool( 132 | name=TimeTools.GET_CURRENT_TIME.value, 133 | description="Get current time in a specific timezones", 134 | inputSchema={ 135 | "type": "object", 136 | "properties": { 137 | "timezone": { 138 | "type": "string", 139 | "description": f"IANA timezone name (e.g., 'America/New_York', 'Europe/London'). Use '{local_tz}' as local timezone if no timezone provided by the user.", 140 | } 141 | }, 142 | "required": ["timezone"], 143 | }, 144 | ), 145 | Tool( 146 | name=TimeTools.CONVERT_TIME.value, 147 | description="Convert time between timezones", 148 | inputSchema={ 149 | "type": "object", 150 | "properties": { 151 | "source_timezone": { 152 | "type": "string", 153 | "description": f"Source IANA timezone name (e.g., 'America/New_York', 'Europe/London'). Use '{local_tz}' as local timezone if no source timezone provided by the user.", 154 | }, 155 | "time": { 156 | "type": "string", 157 | "description": "Time to convert in 24-hour format (HH:MM)", 158 | }, 159 | "target_timezone": { 160 | "type": "string", 161 | "description": f"Target IANA timezone name (e.g., 'Asia/Tokyo', 'America/San_Francisco'). 
Use '{local_tz}' as local timezone if no target timezone provided by the user.", 162 | }, 163 | }, 164 | "required": ["source_timezone", "time", "target_timezone"], 165 | }, 166 | ), 167 | ] 168 | 169 | @server.call_tool() 170 | async def call_tool( 171 | name: str, arguments: dict 172 | ) -> Sequence[TextContent | ImageContent | EmbeddedResource]: 173 | """Handle tool calls for time queries.""" 174 | try: 175 | match name: 176 | case TimeTools.GET_CURRENT_TIME.value: 177 | timezone = arguments.get("timezone") 178 | if not timezone: 179 | raise ValueError("Missing required argument: timezone") 180 | 181 | result = time_server.get_current_time(timezone) 182 | 183 | case TimeTools.CONVERT_TIME.value: 184 | if not all( 185 | k in arguments 186 | for k in ["source_timezone", "time", "target_timezone"] 187 | ): 188 | raise ValueError("Missing required arguments") 189 | 190 | result = time_server.convert_time( 191 | arguments["source_timezone"], 192 | arguments["time"], 193 | arguments["target_timezone"], 194 | ) 195 | case _: 196 | raise ValueError(f"Unknown tool: {name}") 197 | 198 | return [ 199 | TextContent(type="text", text=json.dumps(result.model_dump(), indent=2)) 200 | ] 201 | 202 | except Exception as e: 203 | raise ValueError(f"Error processing mcp-server-time query: {str(e)}") 204 | 205 | options = server.create_initialization_options() 206 | async with stdio_server() as (read_stream, write_stream): 207 | await server.run(read_stream, write_stream, options) 208 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "Node16", 5 | "moduleResolution": "Node16", 6 | "strict": true, 7 | "esModuleInterop": true, 8 | "skipLibCheck": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "resolveJsonModule": true 11 | }, 12 | "include": ["src/**/*"], 13 | "exclude": 
["node_modules"] 14 | } 15 | -------------------------------------------------------------------------------- HoME日韩欧美变态无码一级在线视频 ENTER NUMBET 007
灏忕啓鍏富澶鍗庝负mp4 鍚堥泦 鍥戒骇 涓嬭浇 瀚 娉㈠閲庣粨琛h祫鏂 鐗涢蓟鑹 鎬х埍婵鎯呬簹娲插浘鐗 涓婂濡瑰奖闄㈡垚浜虹墖鍦 寰堝緢鎾镐贡璁哄皬璇 娆х編灏婚 閮藉競婵鎯呯郴鍒楃墖 abcdduppid1 bt鑹插姩婕 鍥戒骇涓夊溇鐗囧湪鍝噷瑙傜湅 鐢佃溅鍧愮埍鎯呰棰 濡瑰鍕惧紩鍝ュ摜 鍥戒骇50璺ぇ楠氬コ缃 浣愯棨閬ュ笇鐨勬姢澹仛鐖卞摕闊冲奖鍏堥攱 www90porn 鏉ユ彃濡瑰鐨勫皬绌 濠跺┒鐨勯粦閫 瀹跺涵涔变鸡鎬v 鑽夎缇庡コ浜轰綋鍐欑湡 娣辩敯鎭瓙鐖界墖 澶鐖辩埍蹇挱鐢靛奖 shaofuzipai 闊╁浗鏈夋垚浜洪粍鑹插浘鐗囧悧 鐔熼煎浘鐗 濮愬鑹 娌佷僵鍎垮浘鐗 濂充汉鎵嬫粵鏁欏 璨岀編濂跺ぇ鐨勬ф劅缇庡コ涓鐩翠笉鍋滅殑鎻夋崗澶уザ瀛031616 涔变鸡楦″反灏忚 灏戠埛 闆 寮鑻 鏃ユ湰澶у弸缁 鐙傚共鍐滄潙灏戝 娆х編楂樿窡涓濊瓒充氦 鏋佸搧灏戝コ鑷叞鍥剧墖 榛勮搲de涓嬬┐ 浠栧窞鑹插浘 瓒呭ぇ鑳嗘ф劅缇庡コ鍥剧墖 娣鑹茶 鍋锋媿涓夎褰bc 婵鎯呯埍鐖卞浘鎴愪汉 鎴愪汉浜氭床蹇挱 濂跺ぇ楠氭按瀵圭櫧娓呮櫚 浜氭床鑷媿鎾稿奖闊冲厛閿 8899uwwcom 瀛曞闃存矡 mmypus alaka 灏忛奔鍎夸笌鑺辨棤缂轰富棰樻洸 f12012璧涚▼ 骞挎苯toyota jiying 绾归摱 寮犲槈姹 棣欐按鐢靛奖褰辫瘎 鍡峰椃鎻掕佸﹩鍡峰椃鍙搷閫 鎬х埍鍔炲叕瀹ら珮娓 澶ф尝鎾稿奖闊 闊╁浗灏戝15p 鏃ラ煩缇庡コ瑁镐綋鑷媿鑹烘湳鐓 WWW_YEIUBA_COM 闈掑窞浜屼腑瀛︾敓鍦ㄦ暀瀹ら噷鍋氱埍 鍠滅埍鑲夋鐨勬帆鑽℃瘝浜叉缇庢矙rmvb 鏃ユ湰缇庡コ姣旀瘮浜轰綋鑹烘湳鍥剧墖 灏忓鎬т氦瑙嗛 灞辫タ婵鎯呴粍鑹叉т氦甯︽垚浜烘縺鎯 灏戝鑹崇┐ www18ricom 濂充汉涓庡叕鐙椾氦閰嶈棰 娆ф床缇庡コ鎬т氦鐨勫奖鐗 缇庡コ濮撶埍鍥剧墖 灏戝绉樹功鑲変綋浼轰警鑰佹澘鎬т氦灏忚 瑗胯タpian30398 鑹插鑸壊鐙煎奖闄 鎬х埍褰曢煶鍚 鏈夊叧娆х編鑹插浘 鎯虫搷閫间簡鎬庝箞鍔 a鐗囨按娴 94鑽塨 鑷媿鍋蜂簹娲茶壊鍥惧悏鍚夊奖闊 鎻掑皠鍥剧墖鍖 鎴戠埍涓濊璧よ8鑹插 鏃ユ湰濂界湅鐨刟v濂虫槑鏄 杩峰ジ濡瑰褰遍櫌 娲楁尽鍓嶅厛鍚冧紵鍝 鎻掑叆灏忓Ж瀛愬渤姣嶅皬绌寸數褰 鏃ユ湰鐔熷涔变鸡鑷媿瑙嗛 浜哄鑹插浘2 WWW_JIYOUZZ_COM 娆х編瑙嗛鐧界櫧鑹 闃跨墖鏃犻檺鐪 yazhouwuwuma 宀抽槼鍘胯崳瀹舵咕涓滈璺殑濡撻櫌鎵撶偖瑕佸ソ澶氶挶 av閲屾槸鐪熷悆绮 鑻卞浗缇庡浗浜轰綋鑹烘湳闇蹭笅浣 濂充汉鐨勫垰闂ㄦ湁澶氭繁 d36471a80002640a 钀濊帀av浼樼癄宕庣埍 鑷媿鍋锋媿涔变鸡灏忚鍥剧墖 浜氭床绯诲垪鍘诲共缃 澶ц儐浜轰綋鑹烘湳鎽勫奖椴 澶勫コ濂介艰繛杩炵湅鍥惧浘 涓嬩綋灞閮ㄤ汉浣撹壓鏈憚褰 浜旀湀澶╂捀鎾稿浘鐗 濂充汉涓庣嫍鏂 xiaonvhaizuoaigushi 鍜岀啛濂虫х埍瑙嗛 鐪湳鑹插惂 浜旀湀澶╄壊浜旀湀绗洓鑹 www777褰遍煶鍏堥攱 濯涘獩鐨勫彉鎬佽佸叕缁垮矝鐢靛奖闄 鎻掑効瀛愯佸﹩姣斿皬璇 浜哄鍦ㄥ簥涓婂共濡瑰 鍧涜湝鎬х埍鍥 杩呴浄澶╁ぉ鑹茬患鍚堝尯 寰峰浗浜轰笌鍏戒氦閰 鑹叉挱瀵艰埅缃戠珯 鐖辩埍甯濆浗閽熷拰鍖哄浘鐗 鐜嬫櫠榛勮壊鐢靛奖 boboxxoo 鐚壊缁煎悎缃憅vod 缇庣孩濠 zhifusiwatupian 鎭嬪褰遍櫌绐佺劧鎵撲笉寮 闊╁浗绁為┈褰遍櫌鐢靛奖 鏈婕備寒鐨勬尝鍥剧墖 鎵句釜鑳界湅榛勭墖鐨勭綉绔 鐖芥姝粍鑹茬數褰辩患鍚堢綉 閾惰充腹鍙佺孩鏃﹝ 姹ゆ柟鎴块棿澶ц儐绉绉佸 鑹叉偁鎮犵患鍚堝奖闄笅杞 鎴愪汉缃戝潃涓轰粈涔堢湅涓嶄簡鐢靛奖 鍎垮瓙鎽稿濡堥槾閮ㄨ棰 鎴愪汉鎬х埍鍥剧墖缃戝潃 鑰佺啛濡3p鐨勬晠浜 t56浜轰綋绯昏壓浜烘憚褰卞浘鐗 WWWDAONOUCOM 绌哄琚粦绀句細鎬т镜鐘浘鐗 婵鎯呮х埍30p jjvod涓. 
鎴愪汉缇庡コ璐村浘鍖 www8se9se WWWCCAVDCOM 鎴愪汉瀹楀悎 鍏浜ゆ崲涔变鸡 ppp36瀹樻柟缃戠珯 鍏堥攱褰遍煶鏃犵爜鑷叞鐢靛奖 鑸掓穱鐨勬т箰瓒 缇庡浗鍏嶈垂榛勮壊鐢靛奖 榛勪功15p 蹇挱鍋氱埍p 娣卞鑽夐煎奖闄 榛勮壊灏忚鍥剧墖鐢靛奖鎵嬫満鍏嶈垂鍦ㄧ嚎瑙傜湅 缇庡皯濂虫濂115 鎴戠埍鐪嬬墖瀹夊叏鍚 瑙嗛涓嬭浇 娣細鐗 77diecn 闈掗潚闈掕鐪嬪ぇ灞岃壊 鍏徃鏈篧WW鏃ユ湰榛勮壊 鏃ユ棩骞叉棩鏃ユ媿 h涓冧竷WWw52AVAVC0m 涓濊椋庨獨鐔熷コ 娉㈠鎮犳偁 922bbbcom寮熶竴闆 铏庣墮huangsedianying 灏忎紮鎿嶇啛濂 瑁镐綋缇庡コp鐓х墖 鍏叕蹇嶄笉浣忕儹鍚诲効濯冲宸ㄤ钩 楠氳佸お澶贡浼 apianwang 鐖辫湝妗冩牎鍥槬鑹叉縺鎯呭皬璇 鎬$孩闄221aacom221aacom 澶滃皠鐙糀V浜氭床鍦ㄧ嚎瑙嗛 缃戦〉鏃犳硶璁块棶dddd94com 涔呬箙鎾告捀绮惧搧 椴亁xoo 楹荤編鐢辩湡鍏嶈垂瑙傜湅pleddctbszcn 缃戝弸鍋锋媿澶ч鐒 澶т笢鏂逛笁绾х墖鍦ㄧ嚎瑙傜湅 鏈綉绔欐渶鏂拌幏鍙 灏戝コ绮夐矋b鑹烘湳鍥剧墖 浜氭床鐢靛奖娆х編鍦ㄧ嚎www080xxcom 娆х編鐢峰悓瑙嗛鐢靛奖缃戠珯 鑻嶄簳绌烘帆涔卞 鏃ユ湰灏戝椋庨獨澶т絾闇插嚭bb鍥 鑹剧帥娌冪壒妫悎鎴愬浘 8wawacomwww8wawacom 鐢ㄥ姏骞叉彃浜哄鍔炲叕瀹 1024閲庣嫾绀惧尯 澶滃鎾稿嬀寮曢獨濂充簹娲 鏍″洯鎯卻e 鏄ヨ壊闃佹ゼ鎵嬫満鍦ㄧ嚎 姣嬪瓙杞贡wwwhuiyang168com 鎻掑叆b閲屼娇鍔叉搷 wwwyoujizzcom浜哄 鐔熷鍦ㄥ鑷媿鍥剧墖 鎴戠殑楂樼骇瀹跺涵濂虫暀甯 涓滃寳濡炲拰鍝ュ摜婵鎯 娑╂订鐖辨订娑 浜氭床鍦ㄧ嚎瑙嗛12avaaaaaaa 涓夌骇榛勮壊澶ф尝 涔呬箙鐑湪绾胯棰99鑹插濮 濂跺ぇ鐨勭編濂宠佸﹩琚汉鍐呭皠 榛勮搲娣紶 铏氭笂鐜勬姳澶ф墜鍔炲浘 鐭ヤ赴婊¢獨濂充汉婵鎯 鑹叉垝澶╀娇鍦ㄩ挶瑙傜湅 鏃ユ湰浣撴搷ed2k 涔虫埧鐪熶汉鏍峰搧灞曞嚭榛勮壊缃戝厤璐瑰湪绾块粍鑹茬數褰遍粍鑹叉垚浜哄揩鎾數褰变鸡鐞嗙數褰遍粍 浜氭床鑲岃倝鐢穚orn 鑹茶壊鑹茬綉鍋氱埍鎬х敓娲昏棰 sexsss 鐖嗕钩濂虫暀甯坋d2k 瀹濊礉寮蹇冩垚浜 闈掗潚鑽夋垚骞撮粍鑹茬綉绔 鐜嬫潕涓瑰Ξ鐨勮湝妗 鍋锋媿鑷媿鍒嗕韩瑙嗛wwwooxx5comwwwooxxbbcom 鑰佺啛濡堢湡瀹為湶鑴 鑷叞鐖辩埍鐢靛奖 鎵嬫満鐢靛奖鏃ユ湰鎴愪汉ccc 鑰佹瘝娣 涔变鸡鐢靛奖鎵嬫満鍦ㄧ嚎瑙傜湅缃 瑁搁槾閮ㄥコ妯℃堡fang 瑁镐綋濂充笅涓绡 蹇挱娆ф床鎴愪汉濂楀浘缃 鎿嶅紶闈欑殑閫 鎴愪汉鐗囧笣鍥 鍏嶈垂缃戠珯浣犱滑鎳傚緱wwwlu2310com 涓ゅ彛瀛恈gaobi瑙嗛 寮哄ジ鏌冲博鐨勬晠浜 娓彴缇庡コ鍋氱埍 缇屽コ闃撮儴澶у叏缇16P鎬х埍鍥剧墖 鑷媿鍖哄伔鎷嶅尯浜氭床鍖簑wwsggw99comwwwranshaocom 010鍚勫浗缇庡コ椴嶇編 www502鏃ユ湰灏戝com 鏃ユ湰浜烘坊閫奸艰灞 闊╁浗鏃ユ湰鍋锋媿榛勮壊褰曞儚 avsm鍔ㄧ墿 绾㈡潖缁煎悎 瀹舵棌涔变鸡鍋氱埍灏忚 骞煎コ骞奸娇浜氭床鏃犵爜 鍋锋媿鑷伔浜哄鑷叞 鏃犲鎴风鎵嬫満鐪媋鐗 鎾镐竴涓嬪摜缃 鏃ユ垚浜哄浘鐗 鍙互鍏嶈垂鐪嬮粍鐗囩殑缃戠珯 鍚翠簹棣ㄦ潕瀹楃憺鍏ㄩ泦瀹屾暣鐗堟潕瀹楃憺9灏忔椂45鍒59绉275g鍘嬬缉鐗堟祦鍑 鎬уゴ闊╁浗鐢靛奖 蹇欓噷鍋烽棽鎿嶇編濂宠瑙嗛 鏄庢槦娣贡缇庡眲 娓呭鎬у彶鍏嶈垂瑙傜湅 7m瑙嗛鎴愪汉鐩存挱鍦ㄧ嚎瑙傜湅鍗堝绂忓埄瑙嗛 浜氭床鏃犵爜鍦ㄧ嚎瑙嗛鍖 pgd720 淇勭綏鏂編濂冲浘涔﹂ 绾㈢暘闃佹墜鏈哄湪绾胯鐪媤wwhongfange888net 鏃ユ湰鐔熷コ鍥涘崄璺 鐢风敺浜掓棩灏忚 鑷媿av灏勫皠 浼︾悊鐞惇褰遍櫌a 浜氭床鍋氱埌濂楀浘 浜哄Xx鍥剧墖 btAv澶╁爞缃 娣鑽$縼鐨勬х敓娲荤埍鐖 鍋锋媿骞煎コ鎬т氦 鏃ラ煩鎴愪汉鑹茶创鍥 www644secom 鍥戒骇涔辫浜哄吔 瀚╂ā涓庣偒瀵屽コ瀵硅祵 鑷媿鎿嶆墦鐢佃瘽鐨勫ぇ闄嗗コ浜 wwwsesekongcom 鐢诲AV 884cno 姹熸尝銈娿倕銇嗗湪绾挎挱鏀 涓滄av澶╁爞浜氭床娆х編 鐚挭鎴愪汉瑙嗛鍏嶈垂 浣忛櫌鏃剁殑鎴戠媯鎻掓潵闅斿 wwwwoyaolucom jlzz鎬瀛 鍛﹀懄瑙嗗睆 鍖楀師澶氶瀛愬叏闆嗙瀛 鍏充簬鏃楄涓濊鐨勪鸡鐞嗙墖 3DAVmp4 99鐑箙涔呮搷骞茬嫚鐙 濂藉悐鏃ュ厛閿 缇庡浗鎴愬勾鍏嶈垂缃戠珯 www鍙ゅ吀姝︿緺鐙犵嫚 
鐚庤壋鍗堝鍥剧墖鎵嬫満涓嬭浇 娣崱濂冲弸鏍″洯 94鎾告捀 浜轰汉鐪嬩汉浜烘搷浜轰汉鎽 灏忔帆铏 閭伓婕敾涔嬭佸笀 瀹夊Ξ璐濇媺鑰佺啛濡囨16P 灏忓鐢熻8鐓 wwwuuuu28com 鍘曟墍鍋锋媿浜氭床娆х編 鏃ユ湰楠氬鎬т氦瑙嗛 鍏嶈垂鍦ㄧ嚎瑙傜湅娆х編鎴愪汉 浜哄绂忓埄 澶滃骞插m wwwll777 鍋锋鎿嶄汉濡昏棰 鐖辨悶閫奸肩患鍚圶鍥 澶уЖ瀛愬伔娆 鎬х埍浜轰汉鎿嶄汉浜虹湅鍏嶈垂瑙嗛鎵嬫満鍦ㄧ嚎 A鐗囬粍鑹插皬璇寸綉 娣贡褰遍煶鎬х埍 h涓冧竷p涓僣n 鑹插凹濮戠綉浜氬窞绗竴瑙嗛 浼婁汉缃戝湪绾胯棰 鏇煎鍥炲繂褰曞叏鏂 鎯崇湅澶ч浮宸村共楠氳揣鐨勮棰戜笂浼犺佸徃鏈 www575avC0m 鏃ユ湰鏃犵爜浜轰汉鑹插湪绾胯棰 涓夌骇鐗囧崡浜ぇ灞犳潃鐢靛奖瀹屾暣鐗 鎿嶆垜鍦ㄧ嚎瑙傜湅涓瓧 wwwanquyue5 灏戝鑷叞缁5宀佸効瀛愬彛 娣闄嶄复 涓庨潚闈掕崏鐩镐技 涔呬箙鐑 妫氭埛鍖虹珯琛楀コvod 790鎴愪汉鎾哥墖 鍘诲共缃戞病鏈変簡鎬庝箞鍔 鑷媿鍋锋媿浜氭床鑹插浘鑹茬礌 476avcom 婵鎯匴缃 a4u鍏ㄩ泦av 鍚夊悏骞煎コ m3344tt 寮烘搷 鍔虫媺鍋氱埍鍥 浜氭床鏈夌爜瑙嗛 婵鎯卪mav 缁忓吀琛楁媿鍋锋媿 鎶芥彃鎽镐钩灏勫搴贡浼 缇庡瓙涓庡叕鐚潅浜や骇瀛 鍋氱埍鐨勭綉鍧鏄灏 鐢蜂负濂冲彛浜ゅ浘鐗囧煿璁 鐔熷コ寮哄ジ鍦ㄧ嚎瑙嗛 鑻嶄簳绌虹鍔涢摼杩 濂虫槑鏄熸縺鎯呯數褰变笅杞 鑹查叿鑹瞞p4 鑻辫缈昏瘧 av12鐩存挱鍦ㄧ嚎 鏃犵爜鍦ㄧ嚎va鏃ラ煩 闊╂棩鍦ㄧ嚎AV 娆х編鎬х埍wwwitcwwww78gncom 444ee 涓鏈亾鑰佺啛濂 鍦ㄧ嚎鐪嬬殑鍏嶈垂鎴愪汉缃戠珯 WW11AABBCOM 2048绀惧尯 swww22bbmmcom japanese濡堝涔眝idoes caopron鍦ㄧ嚎澶ч钑 vava鐨刡 鍝湁鎴愪汉3d鐢靛奖 缃戝弸鑷媿缃戝潃 鏃ユ湰骞煎コ绂忓埄 婊涜壊缁 娣鐖芥姝 鍗忓拰浼︾悊鐗囩929椤 榛勮壊aA鐗噈agnet WWWXXse 涓ょ敺涓娲炲浘瑙 AV浜氭床鑹插浘涓滄柟 鐢滃績绌哄magnet 鍝ュ摜鎴戞兂瑕乼xt瑙嗛 榛勮壊鐢靛奖涔嬫崋缁戝紡 浜轰綋鑹烘湳鎬х數褰 涓塴绾х數褰辨祹鍏 25p浜 涓栫晫鐨勭編灏戝コ鍏嶈垂鎾斁 浼婁汉澶ч钑塧v鐙间汉骞 澶ч浮宸存彃鑰佸﹩magnet seserm6org 鎾曞紑鑰佸﹩鐨勯 鍏堥攱涓瓧鏃犵爜璧勬簮 wwwjj14con 寮犳煆鑺濅汉浣撹壓鏈壋闃抽珮鐓 t58cc 娆х編浜轰綋褰╃粯鍥剧墖 椴侀瞾鑹插浘鐗囧浘鍥 鎴戣鎶芥彃鑰佸笀鐨勫皬绌撮粍鑹插皬璇 灏戝鍚堥泦灏忚 鍏ㄨ壊缃慶om 147浜轰綋鎬ц壓鏈 hongkongav 楗跨編澶ч浮宸村浘 涓濊灏戝鎶犻 濂充汉浜轰綋鑹烘湳鐓х墖 寮哄ジ骞煎コ鍏ㄦ枃鎬х埍灏忚闃呰 鍏嶈垂 鎬ф劅鐨勭編灏戝鑲忓眲 鍜屽叕鍏寷鎻 鍋氬搸缇庡コ鍥剧墖 鎶芥彃楠氬绌村浘 濂充汉涓洪┐椹厤绉 鏂板濡诲瓙鎴戠埍鎿嶄汉濡 涔濋浂鍚庡绌 WWW_SESE888_COM 鑻嶄簳绌鸿佸笀鍏ㄩ泦绉嶅瓙 鏃犵爜 鍐呭皠 绉嶅瓙 鑹插浘鐜板湪鍝兘鐪 ddd鑹插厛閿 宸磋タ涓版弧鍋氱埍 鏄熸湀銇俱倖銈 16p 鍥芥ā鍐板啺 鎻掔編濂崇殑閫 鏃ユ湪浜轰綋鍥剧墖 缇庡浗鍗佹楠氬眲鑰佸笀鏍¢暱鐢靛奖 鑷媿鎯呬汉 鍐呭皠 姝﹁棨鍏板叏瑁 鍏嶈垂澶у挭鍜綉 鐏帆蹇嶈呭皬璇磆绾叉墜 姣嶅闃垮Ж浠绘垜骞插揩鎾 骞插渤姣峚v 浜轰綋妯$壒寰愯嫢瀹 鎻掑叆闃垮Ж缃 鍥芥ā 闇查煎浘 寮哄ジ娣按鎹嗙粦 鐔婇粵鏋楄寮哄ジ灏忚 缇庣啛濂冲織鏉戠幉瀛愰樋濮 蹇挱鍦ㄧ嚎 娉㈠閲庣粨琛 90鍚庡緢钀屽コ瀛╀笂璇惧墠琚敺鍙嬫悶浜嗕竴鐐ス杩樿蹇偣瑕佽繜鍒颁簡鍥借瀵圭櫧杩呴浄涓嬭浇 鑾Ξ娌欏绉嶅瓙 鑳芥挱鏀剧殑娆х編缇や氦瑙嗛 鏃ユ湰鏋侀檺鎷充氦qvod 鏃犵爜鍙d氦鍐呭皠 mei灞 绌夸笂鍐呰。鎸轰赴婊¤劚浜嗗氨骞充簡 浜氭床楦″惂鍥剧墖 鎴戞彃楠氬濡堢┐ 缃戝弸鑷媿鍋锋媿涓嬭浇 瑗挎磱楠氳揣涔789 澶т汉鑳嗕綋 娆х編瑁镐綋楠氱敺 鏃ユ湰濂虫p鑹烘湳鐓 xingai888 濂充汉浜轰綋绌磋壓鏈 灏忔辰鐜涗附浜氱數褰卞叏闆 鍚愰瞾鐣鎴 濠氬墠鍗忚涔 缁忔祹甯堣冭瘯鎴愮哗 鍙屾洸绾垮瀷鍐峰嵈濉 鏉庣鎬 2012娴欐睙浼氳 鏉ㄧ传鐨勭敺鏈嬪弸 angelbeatsop 澶х摲鍟嗕富棰樻洸 娆х編鐢熸畺鍣ㄥ叏瑁歌壓鏈浘 姹傛х埍瑙嗛 鍏藉吔瓒充氦 寮哄ジ涔变鸡涓滀含鐑揩鎾 鑹插煙鎴愪汉缃 WWW399BBCOM 娆х編缁煎悎鑷浘 鑹叉晳鏁戠患鍚堢綉 鎴愪汉鎻夊ザ灏忚 
浜ゅ獩濡硅棰 鍗堝鎴愪汉璁哄潧 娆х編澶ц偉濡炴х埍瑙嗛 椴嶉奔瀚╃┐ 涓囦汉闈犵綉绔欓粍鑹叉帆绉藉皬璇村浘鐗 鎴戣鐪嬮粦浜哄仛鐖 鍏ㄨ8浣撳ぇ鑳嗛┈鍏汉鑹烘湳缇庡コ鍥剧墖 鏃ュコ涓庡ぇ鍚婂揩鎾 娆х編棰滃皠涔嬭壊娌抽┈ 骞茶佸コ浜烘х埍瑙嗛 鎴愪汉浜ゆ崲 澶ц儐鑹烘湳36p 鍐堟湰澶氱华璧板厜 鑰佸瘜濠嗘搷姣旀х埍 缈樿捣楦″反鏃ヤ翰濞 鏃ヨ悓榫欏吔鍥 鏂拌瑙夊奖闄㈠皬缇庡ソ 缇庡コ闇茬┐30p WWWBO50SQWCOM 鍏堥攱褰遍煶鑷叞 杞﹂渿鑷媿p 鎬т氦鐢靛奖鎿嶉肩數褰 涔愬惂瀛愭墜鏈虹増鍥剧墖涓嬭浇 鍋告媿鑷媿鑲ヤ浆褰遍煶 鎴戠埍鐪嬬墖鍏嶈鐗 鍙h堪濂充汉鎿嶉 澶浜掓崲娣姩灏忚 椴侀瞾灏勫皠 骞煎コ閫肩紳 濮愬摜澶ф帆涔 浼︾悊缃戠嫚鎾哥綉 h灏忔父鎴忓彛浜 鐪熷疄濂充汉浣撹壓鏈収 鐑ゅコ纰 灏戝コ鎬х埍缁勫浘鎾告捀鎻 鍋锋媿鑷媿鐢甸洉褰 www涔呬箙鐑璫om 澶уザ閲戝彂缇庡コ鍚冮浮宸 濮愬鐨勬湪鑰12宀 澶滅尗av鍦ㄧ嚎瑙嗛 鎴戞彃鍏ヨ垍濡 WWWWWWHHH222COM 2014鑰佺啛濂 鐖辩埍鍥12p 榛勮壊缃戠珯娣贡灏忚 e7eb6690000073c4 涓滃寳濂借壊鑰佸コ浜 mmm缇庡コ浜轰綋鑹烘湳 鐟熼洦鐣彿 鎴戣鐢锋妧甯堟搷浜 閭eコ鐖辩埍鍥 鎴戠埍鐪嬬墖app姘镐箙鍏嶈垂鐗 寮哄ジ涔变鸡鍑岃颈鎬т氦鎾哥 楂樻竻娴煎皬濯冲鍥剧墖 鎻掓彃濡圭患鍚堢綉 璺笅鏉ヨ垟鎴戠殑閫奸煎皬璇 涔呬箙瑙嗛绯诲垪涔变鸡 pulick浠涔坹d 浜氭床se鍥惧浗鑹插ぉ棣 鐔熷コ鎬х埍缃戝潃 鎾哥偖鍔ㄦ极 鎿嶄綘缁煎悎鍥剧墖 鍢跨墖缃50宀佹棩鏈コ浜哄仛鐖 楠氬楠氱敺鍋氱埍缁忓巻 鎴愪汉缃戞柊钁′含 鎴愪汉缃戜簲鏈堝ぉ鎬$孩闄 濞囧琚帆璁版湵鑼 鑹插濡堝奖闊冲厛宄 鎵f墸娉㈢湅鐪 鏅氳瘽鎿嶅眲 鑹蹭簲鏈堜涵浜畻鍚堣棰 鐖卞唴姊ㄨ姳澶洰蹇挱 鑹茬郴鍐涘洟鎴愪汉 鐢峰+楦″反鎯崇湅缇庡コ灏忕┐ 鍏嶈垂瀛曞鎬х埍鐢靛奖 鍚庡鐨勮鏉 娆х編鑹插惂缇庡コ鑹烘湳鐓 d4ff鐨勬渶鏂扮綉绔 浠ヨ壊鍒梄XX鐧惧害 鍒濇鐨勭編瓒充笣琚滅數褰 鎴戞兂鐪嬮潬閫糰鐗囦簡浜斿叚鍒嗛挓鐨勫綍鍍 涓夌骇鐗囪棰戞懜闃磋拏 娣濂冲ゴ楠氶 ATV444鍏堥攱褰遍煶鐢靛奖缃 涔呬箙鐑柊鍦板潃 91pron绀 youjizz鍜挭 鍙樻佸彟绫诲コ鐜嬭檺楦″反 鑹叉湪鏈ㄤ簹娲 10宀佽悵鑾夎倹浜 鎹嗙粦瑙嗛鏈簞 姣嶄翰琚睏婀块忕殑鑳搁儴姣嶅コ涔变鸡娣崱 鎵嬫満涔呬箙鐪嬩鸡鐞嗙數褰卞厤璐 鏉庡畻鐟炲揩鎾數褰辩綉 鎼滅储鎴愪汉鍋氱埍 鐩楀帟鎾 鎻掑叆鍥捐嚜鎷 鍔ㄦ极榛勮壊鎬у仛鐖辫棰 楂樺鍘嗗尽濮愬瀷鑺濊姖magnet 濂藉悐鑹茬編鍥 浜哄av鏃犵爜鐜懓 寮哄ジ涔辨姟鍣滃晩鍣渨ww9999yacom wwwdiyesao 涓濊缇庤噣鐜╁伔鎷嶈嚜鎷嶄簹娲茶壊鍥 蹇挱浜旀湀澶╁洓鎴胯壊鎾數褰 澶ч钑変紛浜洪粍鐗 褰遍煶鍏堥攱鏉戜笂閲屾矙楝兼眮 閫嶉仴鍧婃垚浜虹數褰 娆х編鎬т氦鍏紑鍏嶈垂瑙嗛 鍥戒骇鎴愪汉缁煎悎鎴愪汉绀惧尯 鍋氱埍寮哄ジ浼︾悊鍥 鍦ㄧ嚎澶ч浮鍚у共灏忛獨璐х炕鐧界溂灏忚 闄堣佸笀鑹崇収闂ㄨ繀闆蜂笅杞 璇嶄笁鍒嗘槬鑹 涔¤鏄ヨ壊 妯变簳鑾変簹bt 妯变簳鑾変簹鍥惧簱 h缃戝鑸 www鍗楅氭紨鍞变細 寮蹇冩縺鍔ㄧ綉浜旀湀澶 涓滀含鐑浘鐗囨紓浜 涓滀含鐑姩婕 涓滀含鐑案涔呭浘 閰掕壊缃戦厭鑹茬患鍚堢綉 鐪嬮粍鐗囧湪鍝噷鐪 缇庡コ鐪嬮粍鐗 杩呴浄5鐪嬮粍鐗 鍝ヤ滑鐪嬮粍鐗 濂冲弸鐪嬮粍鐗 榛勮壊灏忚涔﹀悕 澶╀笂浜洪棿 鏂拌壊涓壊 鍝ュ摜鑹查珮娓 缇庡浗鑹插ぇ鐗 鑹茶礉璐濈數褰 澶钩娲嬪か濡 3p楂樻竻鐢靛奖 Abused 鎻掍笁鍏縺鎯呯珯 鍏ㄦ槸鑹蹭汉褰遍櫌 寮犵闆ㄨ丹瑁稿浘 cumthem 365dhdvd 鐪熷疄鎭愭杤porno 鐧炬捀绀惧尯 鏃ユ棩澶滃 浠栦篃鎾 鑻忔墦楗煎共 閫嶉仴瀹揩娲昏胺 椴侀瞾灏 寮熷紵骞 鎼炵瑧璇佷欢鍒朵綔 鐧借檸绌翠粈涔堟牱瀛愬浘鐗 绂忓埄瑙嗛鍚堥泦300鍏嶈垂 鏇版湰閭伓婕 鑹叉棤鏋 缁煎悎缃 褰遍煶鍏堥攱Measure bt浜氭床缁垮矝褰遍櫌 Jordan Carver AV 鑹插闄㈠奖 绁為┈浜哄鏃х増 yy4410鐞嗚闊╁浗瀹跺涵 ssni 049bt涓嬭浇 涓浗鎼滅嫚鐙犵綉 鎴愪汉鑹虫槦绫冲▍鍗¤帀娉 sdde330 涓濊濉炲叆AV 浜轰汉鎿嶅ぉ澶╂搷鏃ユ棩鎿 鎵嬫満鍦ㄧ嚎鐪嬫尝澶氶噹缁撹。avpn xfplay 娣 鎬ц檺鍦ㄧ嚎瑙嗛 鎬х敓娲诲彛浜よ棰 绉嬮湠鐢靛奖绀鹃暱澶汉 绉嬮湠eeussm蹇挱 
缇炶!銈ゃ偪銈恒儵娓╂硥濂虫悳鏌ュ畼钃濆厜鐨勮棰 灏忛磪瑕栭牷 闈掗潚鍥戒骇瑙嗛鍋锋媿鍦熻豹閰掑簵 灏忛粍鐗囩綉绔 杩呴浄閾炬帴 鏇版湰缇庡コ瀚╅鑹烘湳鍥 鏃ラ煩鍥剧墖娆х編鍋锋媿 灏ょ墿铚滈洩鍎垮畬鏁寸増褰遍煶鍏堥攱 鏃ユ湰鍗堝绂忓埄鍓у満 鏃ユ湰濂虫т綋鍐呭皠绮捐棰 鏃ユ湰濂充紭涓濊瓒充氦瑙嗛 娆叉湜鍚у浗浜ц棰戣嚜鎷 鏃ユ湰鐢蜂汉浜插コ浜哄眮鑲¤蒋浠 鍦ㄧ嚎鏉熺細鑷媿 鏃ラ煩缇庡コ瑁镐綋瑁歌噣鍚庤瑙傞槾瑙嗛 蹇嫄鎴愪汉鐭棰慳pp 钀濊帀鍏嶈垂瑙嗛 757鍗堝绂忓埄鍏嶈垂1ooo鍚堥泦 52pao鍥戒骇 榛勭墖閮Q 寮犳煆鑺32鍒嗚棰戠椹奖闄 1鑹蹭簲鏈堝┓濠穉鍦ㄧ嚎瑙嗛 3344hb鏈鏂板煙鍚 鍜屼竾褰辩綉绫讳技缃戠珯 鎴愪汉rct 鐑闀垮畨鍏嶈垂瑙傜湅鍏ㄩ泦鑻嶈媿褰遍櫌 鍗堝褰遍櫌浼氬憳鍖轰綋楠屼竴鍒嗛挓 669楂樻竻鏃犵爜 鎴愪汉蹇挱鏂版灉鏋滅鍒 5YYSP鍏嶈垂褰遍櫌 榛勮壊鐗噊o ts榫 4433鎴愪汉缃 缁垮矝褰遍櫌涓烘偍鎻愪緵鏈鏂版渶蹇殑蹇挱 瑗跨摐褰遍煶 XXo0鈭炩啓鈭 寰數褰辨垚浜虹敓鐢熸椿澶ч粍鐗 5sxq鍦ㄧ嚎鐪嬭棰 灏戝鐧芥磥鏈夊0灏忚 娆х編灏戝澶ч钑 浼︾悊鐢靛奖 ftp 鎶芥彃灏戝瑙嗛娆х編 鐢滀鸡鏂囧88 澶滆壊绂忓埄瀵艰埅-瀹呯敺绂忓埄缃戝潃澶у叏 姘撮噹娣戞儬涓枃瀛楀箷鍦ㄧ嚎 缃戝弸鑷媿鍒嗙被鍦ㄧ嚎瑙傜湅 鍥戒骇杩峰ジ杩呴浄涓嬭浇 qinxiu13 娆х編鏃ユ湰18绂佸湪绾胯鐪嬭棰(3绾у彂甯) n4dy鍥借揣鑷媿 ak绂忓埄 鍦ㄧ嚎 鍏ヤ镜鑰呴珮娓呮棤鐮 99re66瑙嗛鍦ㄧ嚎鎾斁鍥戒骇8 杩呴浄鎴愪汉澶栧浗閾炬帴 浜氭床灏忚棰戝湪绾挎挱鏀 WWW4438COm wwwx1360con ww8o6o浼︾悊鐗 娆х編Av3d鍔ㄦ极鐨勭綉绔 铦岃毆绐濇棩鏈湪绾 3017鏈鏂板浗浜ц嚜鎷 鐧介笩妯卞コ鏁欏笀鍦ㄧ嚎瑙傜湅 RCT-230 mp4 闊╁浗婕旇壓鍦1313鍦ㄧ嚎瑙傜湅 鑹叉订瀵艰埅 鑹叉褰辫鑹插ぉ澶╄棰 瑗垮窛缁撹。澶氬皯閽卞彲浠ョ帺 ONSD783 avmaO88 鑹捐开閱夊湴绁ヤ粩 鎮犳偁韪╄笍 瀚傚瓙缃 瓒呯瑙嗛鎴戠殑 xxx灏忔竻鏂板奖闄 涓侀濠峰┓鏍″洯灏忚缃 鏂硅弲鐞嗕鸡 娓℃繎鏅剁毊闈╂崋缁 椋庨棿鐢辩編鑹茶鐢靛奖 鏄ユ殩鑺卞紑鎵嬫満鏂扮増鍏ュ彛2018 澶у⿰鍝ヨ祫婧愮兢 澶х澶滃簵鎼鏋佸搧缇庡コ甯﹀洖楂樼骇瀵撴墍闃冲彴涓鐩村共鍒拌帋鍙 缁忓吀涔橀獞濮垮娍鐙傛搷 寮哄ジ涔变鸡鐙犵嫚鐖辩數褰 闈掑ū涔恔k4d 鐢峰瓙鏃ラ艰棰憍xx 澶╁ぉ鎿嶅ぉ骞插ぉ骞插ぉ澶╂棩澶╁ぉ褰遍櫌 娑╂订91 榛戣壊鎬т氦瑙嗛鐗圭骇鐗囦汉涓庡吔鎬т氦鐗圭骇鐗 鐪嬬湅绂忓埄瑙嗛92 娉㈠効褰遍櫌 澶уザ鑲ュ﹩鑷媿鍋锋媿鍥剧墖 鎴戜笅杞藉彲浠ユ挱鏀剧殑鎬х埍瑙嗛,涓嶆槸鐧界殑鏂囧簱 鏃ユ湰缇庡コ鍐呭皠楂樻竻瑙嗛 绁ヤ粩瑙嗚av澶х墖鍚堥泦 銆愬辜濂炽戝彲鐖卞皬骞煎コ閫兼瘺娌¢綈琚竴鍦堢敺浜哄噷杈辫疆濮﹂ギ灏夸腑鍑哄噷 27绂忓埄鍗堝绂忓埄瑙嗛 涓鍙戝け璇姩婕棤淇叏闆嗗湪绾胯鐪 浜氭床鍦ㄧ嚎XO鈪㎡鏃ユ湰鍦ㄧ嚎 涓鏈埌褰遍櫌缃 涓婂師浜氳。鍦ㄧ嚎楂樻竻 鑼夊摜褰辫 ftp 婀垮褰遍櫌鐮磋Вvip 鏃ユ湰鍔ㄦ极 鎼滀竴鎼 鍥戒骇鍦ㄧ嚎鏈嶄簡 绁為┈褰遍櫌涓夌骇鐗 magnet zipai鍥芥ā 鑿茶彶瑙嗛鐖变亢瑕佸皠 娆х編鍏戒氦鍦ㄧ嚎楂樻竻瑙嗛 鍔ㄦ佹х埍瑙嗛瑙傜湅 浜氭床闃縱澶╁爞缃2018鏃犵爜 鑸旈槾璧勬簮 mp4 鐞嗚鐗囧乏绾212鏃犻伄鎸 鐞惇褰遍櫌77lcd 浣愪綈鏈ㄦ槑甯屾湭浜′汉鍦ㄧ嚎瑙嗛 8x8x8x鎴愪汉缃戝崕浜 澶氫汉鍋氫汉鐖辩殑瑙嗛鍥剧墖 鍙跺瓙妤d笁绾ф湯鍒犺棰 鑽変粬濡堟瘮瑙嗛 浜斾簲鐢靛奖闄鸡鐞嗙墖 鏃ユ湰涓枃瀛楀箷 鍥戒骇鑷媿 褰遍煶 骞插濡归珮娓呭湪绾垮奖闄 鍟暘褰遍櫌鎬т氦瑙嗛 鍥戒骇鍓ф儏绮惧搧灏忚棰 銆愭缇庢х埍-鏃ユ湰AV涓嶅崱鍦ㄧ嚎瑙傜湅_涓嶅崱鐨勬棤鐮侀珮娓呯殑av_鏃ユ湰鑰佷汉av鍦ㄧ嚎瑙傜湅銆慼tt 涓浗鏄竴绾ф瘺鐗 澶╁ぉ鐪嬪ぉ澶╁共澶╁ぉ鍟 涓绾х壒榛勭壒绾ч粍鑹茬敺濂宠8浣撴т氦姣涚墖 浠斾粩缃戜鸡鐞 灏忛浮宸村湪绾 浼︾悊鑱氬悎111pdy shanrnxingjiaoshipin 纾佸姏閾 濂借幈鍧炲コ鏄庢槦 鍥戒骇濂冲拰鐙楄繀闆 閰掕壊缃戝暘鍟 鎴戣鐖变箙涔呭奖瑙嗘彃鎻 av涓瓧鏈夌爜鍦ㄧ嚎瑙傜湅鍏嶆挱鏀惧櫒 鍥戒骇av涔辫棰 kkkk1112瑙嗛 寰俊榛勭兢鍏嶈垂璇曠湅 鎵嬫満鍦ㄧ嚎鎾斁榛戦拱鍧犺惤h鏈垹鍑忕増 
瓒呯椤甸潰60down 榛勯〉缃戠珯瑙嗛鍏嶈垂浼氬憳 - 鐧惧害 eee778鏈鏂扮綉鍧 鍟暘鍟數褰辨极鐢 娣涓濈邯 鎴愪汉鏃ュ眲瑙嗛 娑╂挱闊抽 AW褰遍櫌 鑻嶈媿鍗堝鍏嶈垂瑙傜湅 涓夌骇 蹇冨姩濂冲弸绗竷澶╃鍒╄棰 鎬ч獨鍋氱埍鍚冨ザ瑙嗛 鎬ф槬鏆栬姳寮鑷媿鏂版墜鍖 鎬ф劅濂崇涔﹀ぇ濂剁鍔涢摼鎺 闂蹭汉鍚уぇ棣欒晧鍦ㄧ嚎瑙嗛 鎬ф缇巋dee 缁窛閲岀华鏃犵爜浣滃搧鐢靛奖 浜嗚В鑳屽悗鐨勬垜av缇庡浗鐢靛奖 鐧惧绂忓埄瀵艰埅 姘村師姊ㄨ姳鍔犲嫆姣斿奖闊冲厛閿 av銆367,pw www5595con 鍝ヤ篃灏勭患鍚 绉嬮湠缃戠珯鏃犵爜鐗囧厤璐硅鐪 4438x8鎴愪汉鍏ㄥ浗鍏嶈垂瑙嗛 娓╁〾涓嶉泤瑙嗛 ftp 涓涙灄璧ゅ瓙蹇冪櫨搴﹁创鍚 澶ф尝濂冲绌垮唴琛h棰 绔嬪満 鍏嶈垂鍦ㄧ嚎鐪 澶栧浗鍏嶈垂褰遍櫌100000 缃戠孩缇庡コ纾佸姏閾炬帴 鏃ラ煩鍐欑湡绂忓埄鍦ㄧ嚎瑙嗛 娣贡瑙嗛鎿嶅眲 淇勭綏鏂コ浜烘搷B 鍋氱埍瑙嗛鍏嶈垂瀹夎缃戠珯 浜哄av瑙嗗睆鍦ㄧ嚎鍏嶈垂瑙傜湅 澶уザ鍥借 magnet 浜轰汉灏勪綋鍐呭皠鍦ㄧ嚎瑙嗛 94绂忓埄绀惧尯浼氬憳 1769鍦ㄧ嚎瑙嗛涓涓冨叚 sm璋冩暀鐏岃偁瑙嗛鍦ㄧ嚎瑙傜湅 浜旀湀濠峰┓鍏湀鍚堜亢鏉ヤ篃婕敾 free娆х編浜虹嫍浜掍氦 91璺 濠峰┓涔濇湀涓侀 瓒呯920鍦ㄧ嚎 浜氭床va澶╁爞鏃犵爜鍦ㄧ粧 Av3847 鏉ㄥ箓1鍒11绉掕タ鐡滃湪绾 鍦ㄧ嚎绂忓埄鐢靛奖瀛楀箷鍏嶈垂瑙傜湅 涓嶅彲鑳藉畬鎴愮殑浠诲姟H鐗 鏈缈兼墜鏈哄湪绾挎挱鏀 褰╃敾鍫傜晫闄 鍏嶈垂榛勮壊AV gg267c0m 91鐖辩埍绀惧尯 99鎿嶄汉浜烘搷 浜轰汉濡讳汉浜鸿秴浜轰汉涓鏈亾 楂樻竻娆х編16P 鍓嶇敯棣欑粐楂樻竻鏃犵爜 鍚堥泦 璋冩暀涓濊鍦ㄧ嚎鎾斁 bbi-206 se sex 浜插弸鎭嬬埍涓枃鏃犵爜 涓嬭浇 鏄庢槦鍚堟垚绋缂鸿祫婧愬叡浜渶鏂板湴鍧 浜氭床 gegese 鐖嗕钩鎿嶉艰棰 瓒呯97澶ч钑4438x saozi8浼︾悊 sepapac 鎻掑簢 骞插鏍峰厤璐硅棰戠櫨搴 浜旀湀澶╃儹宸村悎鎴愯棰 灏勫濡囧奖闄 鍐呭皠濡瑰瓙鍏嶈垂瑙嗛 pppd481鍦ㄧ嚎瑙傜湅鏃犵爜 娆х編鎬х埍瑙嗛鐔棬璧屽満 楦″惂鎿嶅眲瑙嗛 1鍟婃棤濂楁竻鏅 鏃ユ湰瑙嗛娣贡 鍋锋媿鑸嶅弸璧勬簮 绱鍩庤蕉浜嬪摢閲岃兘鐪 VID688 涓潙鐭ユ儬鏈夌爜鍦ㄧ嚎瑙嗛 鏃ラ煩AV榛戞湪鐪熺敱缇庣數褰遍椤 鍠疯鎺ㄨ崘瓒呯█鏈夌數瑙嗘埧 鐙犵嫚骞茶繀闆蜂笅杞 2019浜氭床鍥藉唴鏃犵爜鍋锋媿瑙嗛 鍐呭皠,瀹鹃骞茬偖瑙嗛 涓濊灏戝3p鍦ㄧ嚎瑙嗛 鎴愪汉浼︾悊绂忓埄 qvrt-049纾佸姏閾炬帴 鐧惧害鏃ユ湰涓鏈亾鍏嶈垂瑙 鐔熷コ浜哄涓嚭 鍖椾含灞勫眲灞勫眲灞勫眲灞 閫兼帆濂宠棰 鎿嶉兼崗濂舵娊鎻掕棰 鍥戒骇鍦ㄧ嚎鍋氱埍鍏嶈垂瑙嗛 鑽夋Υ鍦ㄧ嚎鑷媿 鍥戒骇鍦ㄧ嚎瑙傜湅涔充氦 鑹叉缇1314 姝e湪鎾斁鍥戒腑濂崇敓涔虫埧鐪熷 瀹鹃澶骞叉椿鍋锋媿瑙嗛 鑾夊摜涓嶉泤瑙嗛涓嬭浇 鏋佸搧鐧藉缇庡コ涓绘挱鏋佸敖璇辨儜,鍠滄鐨勪笉瑕侀敊杩16 鐮村榛勮壊瑙嗛 涓绾фт氦鍋氱埍榛勭墖 淇轰篃鍘荤患鍚堢綉鍦ㄧ嚎瑙嗛 澶╁ぉ鑽夊湪绾胯鐪嬭棰 娑婃秺褰卞簱鐢峰コ瑙嗛 988澶ч浮宸存彃缇庡コ澶ч艰 91宸ㄧ綉绔 浜氭床av鍓ф儏 wwwjingziwou8 涓涓ぇ閬撲紛浜烘$孩闄 鐞惇鐢靛奖姹″灑 澶╂棤鏃ュぉ澶╁皠涔呰崏 鑾插疄鍏嬭暰鍎垮湪绾跨湅 涓侀鎴戠綉浜旀湀濠峰┓ 澶栧浗鍏戒氦瑙嗛 鍏嶈垂鐨勬挱鏀惧櫒 璋冩暀鍫曡惤涓棩闊╃編濂 鎵嬫満2014鍥戒骇鑷媿 姣忔棩鏇存柊鏃犲悧鑷媿瑙嗛 鑹插枩鍥芥ā瑙嗛鍦ㄧ嚎鎾斁 涔变氦鍦ㄧ嚎瑙嗛 灞辩敯鐩寸編绉嶅瓙 绮惧搧缇庡コ_99澶滆壊绂忓埄 濮愬紵鎶芥彃鎷嶆媿鍏嶈垂瑙嗛 寮哄ジ鍐呭皠 鍦ㄧ嚎瑙傜湅 楦″反鎻掑叆灞佽偂鍟暘鍟 涓冩鐙奸潚闈掗潚鏈鏂 杩戜翰涔变鸡瑙嗛 鐑棬鎺ㄨ崘缇庡コ鍔ㄦ佸浘鐗囬珮娼甔X00鍙堥粍鍙堣壊鍔ㄦ佸浘 濂囩背褰辫缃戠珯璧勬簮 浜曚笂鐬冲湪绾挎挱鏀 绨ч媿鐗噆k guochantoupai ipz-576鍦ㄧ嚎鎾斁 浼︾悊鍥剧墖瑙嗛浜氭床鏇版湰 鏃ユ湰濂充紭鑰佹箍褰遍櫌av鎴愪汉褰遍櫌 宀涘浗绉嶅瓙鎼繍宸ュ畼缃 eeussa cok 瓒呯骇鍠靛柕鎼滅储缃戠珯 浜氭床鍖荤敓涓庢姢澹10p 鎶栭煶琚皝婕忕偣鐨勮棰 4338x鍗庝汉姘镐箙鍏嶈垂 榛勮壊褰曞儚涓绾у甫骞查奸肩殑涓绾у甫涓嶇┛瑁ゅ瓙鐨勪竴璧峰甫 濂崇敓瀵濆鐨勫▏鍠樺0 榛勮壊瑙嗛璧勬簮鍚 
榛勮壊鑷叞瑙嗛鎵撶畻澶у叏 濂虫牎闀垮緱绉佹 榛勮壊ppp灏忚棰 鍚庡叆涓鐢熸搷閫艰棰戠綉绔 濂崇涓绘挱鑴辫。鐑垶绂忓埄杩呴浄涓嬭浇 gegezyzy 澶ч钑夌嫾鐙兼棩浼婁汉 闈掗噹鍦g綏绉嶅瓙 娑╂订绂 濯氬績涓绘挱鎴峰鎼笎绡 娉㈠閲庝竴鏈亾鍦ㄧ嚎鎾斁 灏ら叿浼︾悊鐗 鑹茶壊鐢靛奖 ed2k 92鐪嬬湅c 鍙夌墰b 鑹叉挱浜旀湀浜氭床缁煎悎缃戠珯 - 鐧惧害 鎴恮wwccc977 瀹岀編鐪嬬湅楝肩埗 绂忓埄绨х墖 xxxvdynom [榛戠櫧涓枃]鑹お瑕佺户缁伔涓滆タ 鍥藉唴鐪熷疄鍋锋媿 鎵嬫満鍦ㄧ嚎 鍐滄皯av 鍚緸鑽夊湪绾胯棰 鍥戒骇 鍥戒骇鑷媿鍋锋媿鍦ㄧ嚎瀵艰埅 鍥戒骇涓绘挱鏃犵爜杩呴浄 鍥戒骇鐪熷疄鎹㈠鍦ㄧ嚎 鍐呰¥鍝ユ搷S褰㈤粦涓濊繛浣撴湇楂樿窡宸ㄤ钩鐨勫鎴跨粡鐞嗕赴婊″ぇ濂跺瓙榻怋灏忕煭瑁欑湅鐫灏辨兂鎿 鍐呰挋鍖呭ご灏忛獨濂冲姙鍏灏忛湶鑴歌嚜鎷嶇涓夊脊 瓒呯骇鑹宠垶 鍏嶈垂绾夸笂楂樻竻a鐗囩洿鎾 鍏嶈垂鐐规挱澶уЖ濡堝奖闄 鏉惧潅瀛e疅瀛17纾佸姏閾炬帴 鍛﹀懄绂佸 鑷媿鍋锋媿绂忓埄绀惧湪绾 shenaijiq 涓鏈亾榛勮壊鍏嶈垂瑙嗛 浜氭床鐔熷コ浼︾悊 褰遍櫌 涔呰崏鑹叉柊鍦ㄧ嚎褰遍櫌 鑹插枃鍙秴纰板湪绾胯棰 娆х編瀹跺涵娲惧纾侀摼鎺agnet 鍥戒骇灏忕惔 91绂忓埄鍔ㄦ极 magnet 鎿嶅悓浜嬬殑灏忓コ鍙嬬埍鍓緫 寰媿绂忓埄浼︾悊鐗 涓侀浜旀湀娆ф床瑙嗛鎾斁 888kbkb 涓滄柟鍥涜檸av浼婁汉 鎻掕悵鑾夊奖闄 1315褰遍櫌 涔呬箙鍙d氦鎻掕悵鑾夊奖闄 绁炵埗A鈪ゅ姩婕湪绾胯鐪 涓夌骇绾剁悊鐗 闈掓槬鑽変紛浜哄厤璐硅棰6 鍙互鎾斁鐨勮棰 瀛曚氦 8aay璺痗om 闈掗潚鑽夌鍒╄棰戞挱鏀 鎴愪汉MV鏃犵爜濂充紭 hulisecon 娆х編鍋氱埍鑹插浘30p 缇庡コ鍐呭皠濂楀浘 鑱婃枊涔嬫帆鑽$嫄浠 鏂拌壊鐚挭瀵艰埅缃戠珯 鏈変竴涓コ妯$壒鍙┈浠涔 缇庡眲鍥剧墖缃 寮犵闆ㄧ殑娣崱 璇辨儜浜轰綋鑹烘湳涓绾у浘鐗 濮愬鏁欐垜灏 缇庡浗鍗佹鍟﹀氨鐖卞共澶勫コ 娆ф床涓嬩綋鑹烘湳鐓 浜轰綋鑹烘湳鏃ユ湰鐢佃剳 楠氶煎仛鐖辨晠浜 濂宠壊骞紃mvb 鎬х埍涔眕婵鎯 娆х編娣玸ex 闀跨瘒骞煎辜灏忚 褰遍煶鍏堥攱鍋囬槼鍏锋彃缇庡コ 娣鐢熷仛鐖卞浘 qvod鎻掕強鑺辩綉 澶ч檰椤剁骇鎴愪汉a鐗 瑁镐綋闈欑浉 缇庡皯濂充钩鎴块槼閮ㄤ汉浣撹壓鏈浘
小熙公主夫妻华为mp4 合集 国产 下载 嫩 波多野结衣资料 牛鼻色 性爱激情亚洲图片 上妹妹影院成人片场 很很撸乱论小说 欧美尻逼 都市激情系列片 abcdduppid1 bt色动漫 国产三圾片在哪里观看 电车坐爱情视频 妹妹勾引哥哥 国产50路大骚女网 佐藤遥希的护士做爱哟音影先锋 www90porn 来插妹妹的小穴 婶婶的黑逼 家庭乱伦性av 草裙美女人体写真 深田恭子爽片 夫妻爱爱快播电影 shaofuzipai 韩国有成人黄色图片吗 熟逼图片 姐妹色 沁佩儿图片 女人手滛教学 貌美奶大的性感美女一直不停的揉捏大奶子031616 乱伦鸡巴小说 少爷 雏 开苞 日本大友组 狂干农村少妇 欧美高跟丝袜足交 极品少女自慰图片 黄蓉de下穴 他州色图 超大胆性感美女图片 淫香色语 偷拍三角形abc 激情爱爱图成人 成人亚洲快播 奶大骚水对白清晰 亚洲自拍撸影音先锋 8899uwwcom 孕妇阴沟 mmypus alaka 小鱼儿与花无缺主题曲 f12012赛程 广汽toyota jiying 纹银 张嘉汶 香水电影影评 嗷嗷插老婆嗷嗷叫操逼 性爱办公室高清 大波撸影音 韩国少妇15p 日韩美女裸体自拍艺术照 WWW_YEIUBA_COM 青州二中学生在教室里做爱 喜爱肉棒的淫荡母亲橘美沙rmvb 日本美女比比人体艺术图片 小姐性交视频 山西激情黄色性交带成人激情 少妇艳穴 www18ricom 女人与公狗交配视频 欧洲美女性交的影片 美女姓爱图片 少妇秘书肉体伺侯老板性交小说 西西pian30398 色导航色狼影院 性爱录音吧 有关欧美色图 想操逼了怎么办 a片水浒 94草b 自拍偷亚洲色图吉吉影音 插射图片区 我爱丝袜赤裸色宫 日本好看的av女明星 迷奸妹妹影院 洗澡前先吃伟哥 插入小姨子岳母小穴电影 日本熟妇乱伦自拍视频 人妻色图2 WWW_JIYOUZZ_COM 欧美视频白白色 阿片无限看 yazhouwuwuma 岳阳县荣家湾东风路的妓院打炮要好多钱 av里是真吃精 英国美国人体艺术露下体 女人的刚门有多深 d36471a80002640a 萝莉av优篠崎爱 自拍偷拍乱伦小说图片 亚洲系列去干网 大胆人体艺术摄影鲍 处女好逼连连看图图 下体局部人体艺术摄影 五月天撸撸图片 女人与狗文 xiaonvhaizuoaigushi 和熟女性爱视频 眯眯色吧 五月天色五月第四色 www777影音先锋 媛媛的变态老公绿岛电影院 插儿子老婆比小说 人妖在床上干妹妹 坛蜜性爱图 迅雷天天色综合区 德国人与兽交配 色播导航网站 爱爱帝国钟和区图片 王晶黄色电影 boboxxoo 猫色综合网qvod 美红婷 zhifusiwatupian 恋夜影院突然打不开 韩国神马影院电影 最漂亮的波图片 找个能看黄片的网站 爽歪歪黄色电影综合网 银耳丹叁红早z 汤方房间大胆秀私处 色悠悠综合影院下载 成人网址为什么看不了电影 儿子摸妈妈阴部视频 成人性爱图片网址 老熟妇3p的故事 t56人体系艺人摄影图片 WWWDAONOUCOM 空姐被黑社会性侵犯图片 激情性爱30p jjvod中森 成人美女贴图区 www8se9se WWWCCAVDCOM 成人宗合 公媳交换乱伦 ppp36官方网站 先锋影音无码自慰电影 舒淇的性乐趣 美国免费黄色电影 黄书15p 快播做爱p 深夜草逼影院 黄色小说图片电影手机免费在线观看 美少女死奸115 我爱看片安全吗 视频下载 淫会片 77diecn 青青青观看大屌色 公司机WWW日本黄色 日日干日日拍 h七七WWw52AVAVC0m 丝袜风骚熟女 波多悠悠 922bbbcom弟一集 虎牙huangsedianying 小伙操熟女 裸体美女p照片 公公忍不住热吻儿媳妇巨乳 骚老太太乱伦 apianwang 爱蜜桃校园春色激情小说 怡红院221aacom221aacom 夜射狼AV亚洲在线视频 网页无法访问dddd94com 久久撸撸精品 鲁xxoo 麻美由真免费观看pleddctbszcn 网友偷拍大香焦 大东方三级片在线观看 本网站最新获取 少女粉鲍b艺术图片 亚洲电影欧美在线www080xxcom 欧美男同视频电影网站 苍井空淫乱妻 日本少妇风骚大但露出bb图 艾玛沃特森合成图 8wawacomwww8wawacom 用力干插人妻办公室 1024野狼社区 夜夜撸勾引骚女亚洲 校园情se 春色阁楼手机在线 毋子轮乱wwwhuiyang168com 插入b里使劲操 wwwyoujizzcom人妖 熟妇在家自拍图片 我的高级家庭女教师 东北妞和哥哥激情 涩涩爱涩涩 亚洲在线视频12avaaaaaaa 三级黄色大波 久久热在线视频99色姐姐 奶大的美女老婆被人内射 黄蓉淫传 虚渊玄抱大手办图 知丰满骚女人激情 
色戒天使在钱观看 日本体操ed2k 乳房真人样品展出黄色网免费在线黄色电影黄色成人快播电影伦理电影黄 亚洲肌肉男porn 色色色网做爱性生活视频 sexsss 爆乳女教师ed2k 宝贝开心成人 青青草成年黄色网站 王李丹妮的蜜桃 偷拍自拍分享视频wwwooxx5comwwwooxxbbcom 老熟妈真实露脸 自慰爱爱电影 手机电影日本成人ccc 老母淫 乱伦电影手机在线观看网 裸阴部女模汤fang 裸体女下一篇 快播欧洲成人套图网 操张静的逼 成人片帝国 免费网站你们懂得wwwlu2310com 两口子cgaobi视频 强奸柳岩的故事 港台美女做爱 羌女阴部大全美16P性爱图片 自拍区偷拍区亚洲区wwwsggw99comwwwranshaocom 010各国美女鲍美 www502日本少妇com 日本人添逼逼视屏 韩国日本偷拍黄色录像 avsm动物 红杏综合 家族乱伦做爱小说 幼女幼齿亚洲无码 偷拍自偷人妻自慰 无客户端手机看a片 撸一下哥网 日成人图片 可以免费看黄片的网站 吴亚馨李宗瑞全集完整版李宗瑞9小时45分59秒275g压缩版流出 性奴韩国电影 忙里偷闲操美女衅视频 明星淫乱美屄 清宫性史免费观看 7m视频成人直播在线观看午夜福利视频 亚洲无码在线视频区 pgd720 俄罗斯美女图书馆 红番阁手机在线观看wwwhongfange888net 日本熟女四十路 男男互日小说 自拍av射射 伦理琪琪影院a 亚洲做爰套图 人妖Xx图片 btAv天堂网 淫媳荡翁的性生活爱爱 偷拍幼女性交 日韩成人色贴图 www644secom 国产乱论人兽 嫩模与炫富女对赌 自拍操打电话的大陆女人 wwwsesekongcom 画室AV 884cno 江波りゅう在线播放 东欧av天堂亚洲欧美 猫咪成人视频免费 住院时的我狂插来隔壁 wwwwoyaolucom jlzz怀孕 呦呦视屏 北原多香子全集种子 关于旗袍丝袜的伦理片 3DAVmp4 99热久久操干狠狠 好吊日先锋 美国成年免费网站 www古典武侠狠狠 猎艳午夜图片手机下载 淫荡女友校园 94撸撸 人人看人人操人人摸 小淫虫 邪恶漫画之老师 安妮贝拉老熟妇性16P 小学生裸照 wwwuuuu28com 厕所偷拍亚洲欧美 日本骚妞性交视频 免费在线观看欧美成人 人妻福利 夜夜干夜m wwwll777 偷植操人妻视频 爱搞逼逼综合X图 大姨子偷欢 性爱人人操人人看免费视频手机在线 A片黄色小说网 淫乱影音性爱 h七七p七cn 色尼姑网亚州第一视频 伊人网在线视频 曼娜回忆录全文 想看大鸡巴干骚货的视频上传老司机 www575avC0m 日本无码人人色在线视频 三级片南京大屠杀电影完整版 操我在线观看中字 wwwanquyue5 少妇自慰给5岁儿子口 淫妻降临 与青青草相似 久久热要 棚户区站街女vod 790成人撸片 去干网没有了怎么办 自拍偷拍亚洲色图色素 476avcom 激情W网 a4u全集av 吉吉幼女 m3344tt 强操 劳拉做爱图 亚洲有码视频 激情mmav 经典街拍偷拍 抽插摸乳射家庭乱伦 美子与公猪杂交产子 做爱的网址是多少 男为女口交图片培训 熟女强奸在线视频 苍井空磁力链连 女明星激情电影下载 色酷色mp4 英语翻译 av12直播在线 无码在线va日韩 韩日在线AV 欧美性爱wwwitcwwww78gncom 444ee 一本道老熟女 在线看的免费成人网站 WW11AABBCOM 2048社区 swww22bbmmcom japanese妈妈乱vidoes caopron在线大香蕉 vava的b 哪有成人3d电影 网友自拍网址 日本幼女福利 滛色综 淫妻爽歪歪 协和伦理片第929页 黄色aA片magnet WWWXXse 两男一洞图解 AV亚洲色图东方 甜心空姐magnet 哥哥我想要txt视频 黄色电影之捆绑式 人体艺术性电影 三l级电影济公 25p亚 世界的美少女免费播放 伊人大香蕉av狼人干 大鸡巴插老婆magnet seserm6org 撕开老婆的逼 先锋中字无码资源 wwwjj14con 张柏芝人体艺术艳阳高照 t58cc 欧美人体彩绘图片 鲁鲁色图片图图 我要抽插老师的小穴黄色小说 少妇合集小说 全色网com 147人体性艺术 hongkongav 饿美大鸡巴图 丝袜少妇抠逼 女人人体艺术照片 强奸幼女全文性爱小说阅读 免费 性感的美少妇肏屄 和公公猛插 做哎美女图片 抽插骚嫩穴图 女人为驴马配种 新婚妻子我爱操人妻 九零后嫩穴 WWW_SESE888_COM 苍井空老师全集种子 无码 内射 种子 色图现在哪能看 ddd色先锋 巴西丰满做爱 星月まゆら 16p 国模冰冰 插美女的逼 日木人体图片 美国十次骚屄老师校长电影 自拍情人 内射 武藤兰全裸 
免费大咪咪网 火淫忍者小说h纲手 母娘阿姨任我干快播 干岳母av 人体模特徐若宣 插入阿姨网 国模 露逼图 强奸淫水捆绑 熊黛林被强奸小说 美熟女志村玲子阿姨 快播在线 波多野结衣 90后很萌女孩上课前被男友搞了一炮她还说快点要迟到了国语对白迅雷下载 莫妮沙娜种子 能播放的欧美群交视频 日本极限拳交qvod 无码口交内射 mei屄 穿上内衣挺丰满脱了就平了 亚洲鸡吧图片 我插骚妈妈穴 网友自拍偷拍下载 西洋骚货之789 大人胆体 欧美裸体骚男 日本女性pp艺术照 xingai888 女人人体穴艺术 小泽玛丽亚电影全集 吐鲁番租房 婚前协议书 经济师考试成绩 双曲线型冷却塔 李禄思 2012浙江会考 杨紫的男朋友 angelbeatsop 大瓷商主题曲 欧美生殖器全裸艺术图 求性爱视频 兽兽足交 强奸乱伦东京热快播 色域成人网 WWW399BBCOM 欧美综合自图 色救救综合网 成人揉奶小说 交媛妹视频 午夜成人论坛 欧美大肥妞性爱视频 鲍鱼嫩穴 万人靠网站黄色淫秽小说图片 我要看黑人做爱 全裸体大胆马六人艺术美女图片 日女与大吊快播 欧美颜射之色河马 干老女人性爱视频 成人交换 大胆艺术36p 冈本多绪走光 老富婆操比性爱 翘起鸡巴日亲娘 日萌龙兽图 新视觉影院小美好 美女露穴30p WWWBO50SQWCOM 先锋影音自慰 车震自拍p 性交电影操逼电影 乐吧子手机版图片下载 偸拍自拍肥佬影音 我爱看片免装版 口述女人操逼 夫妻互换淫动小说 鲁鲁射射 幼女逼缝 姐哥大淫乱 伦理网狠撸网 h小游戏口交 真实女人体艺术照 烤女碧 少女性爱组图撸撸插 偷拍自拍电雕影 www久久热com 大奶金发美女吃鸡巴 姐姐的木耳12岁 夜猫av在线视频 我插入舅妈 WWWWWWHHH222COM 2014老熟女 爱爱图12p 黄色网站淫乱小说 e7eb6690000073c4 东北好色老女人 mmm美女人体艺术 瑟雨番号 我被男技师操了 那女爱爱图 我爱看片app永久免费版 强奸乱伦凌辱性交撸管 高清浪逼小媳妇图片 插插妹综合网 跪下来舔我的逼逼小说 久久视频系列乱伦 pulick什么yd 亚洲se图国色天香 熟女性爱网址 撸炮动漫 操你综合图片 嘿片网50岁日本女人做爱 骚妇骚男做爱经历 成人网新葡京 成人网五月天怡红院 娇妻被淫记朱茵 色妈妈影音先峰 扣扣波看看 普通话操屄 色五月亭亭宗合视频 爱内梨花夫目快播 色系军团成人 男士鸡巴想看美女小穴 免费孕妇性爱电影 后宫的规条 欧美色吧美女艺术照 d4ff的最新网站 以色列XXX百度 初次的美足丝袜电影 我想看靠逼a片了五六分钟的录像 三级片视频摸阴蒂 淫妻女奴骚逼 ATV444先锋影音电影网 久久热新地址 91pron社 youjizz咪咪 变态另类女王虐鸡巴 色木木亚洲 10岁萝莉肛交 捆绑视频本庄 母亲被汗湿透的胸部母女乱伦淫荡 手机久久看伦理电影免费 李宗瑞快播电影网 搜索成人做爱 盗厕撮 插入图自拍 动漫黄色性做爱视频 高学历御姐型芝芝magnet 好吊色美国 人妖av无码玫瑰 强奸乱抡噜啊噜www9999yacom wwwdiyesao 丝袜美臀玩偷拍自拍亚洲色图 快播五月天四房色播电影 大香蕉伊人黄片 影音先锋村上里沙鬼汁 逍遥坊成人电影 欧美性交公开免费视频 国产成人综合成人社区 做爱强奸伦理图 在线大鸡吧干小骚货翻白眼小说 陈老师艳照门迅雷下载 词三分春色 乡见春色 樱井莉亚bt 樱井莉亚图库 h网导航 www南通演唱会 开心激动网五月天 东京热图片漂亮 东京热动漫 东京热永久图 酒色网酒色综合网 看黄片在哪里看 美女看黄片 迅雷5看黄片 哥们看黄片 女友看黄片 黄色小说书名 天上人间 新色中色 哥哥色高清 美国色大片 色贝贝电影 太平洋夫妻 3p高清电影 Abused 插三八激情站 全是色人影院 张筱雨赤裸图 cumthem 365dhdvd 真实恐怖vporno 百撸社区 日日夜夜 他也撸 苏打饼干 逍遥宫快活谷 鲁鲁射 弟弟干 搞笑证件制作 白虎穴什么样子图片 福利视频合集300免费 曰本邪恶漫 色无极 综合网 影音先锋Measure bt亚洲绿岛影院 Jordan Carver AV 色夜院影 神马人妻旧版 yy4410理论韩国家庭 ssni 049bt下载 中国搜狠狠网 成人艳星米娅卡莉法 sdde330 丝袜塞入AV 人人操天天操日日操 手机在线看波多野结衣avpn xfplay 深 性虐在线视频 性生活口交视频 秋霞电影社长夫人 秋霞eeussm快播 羞耻!イタズラ温泉女搜查官蓝光的视频 小鴅視頻 青青国产视频偷拍土豪酒店 小黄片网站 迅雷链接 曰本美女嫩鮱艺术图 日韩图片欧美偷拍 尤物蜜雪儿完整版影音先锋 日本午夜福利剧场 日本女性体内射精视频 
日本女优丝袜足交视频 欲望吧国产视频自拍 日本男人亲女人屁股软件 在线束缚自拍 日韩美女裸体裸臀后视观阴视频 快狐成人短视频app 萝莉免费视频 757午夜福利免费1ooo合集 52pao国产 黄片郡QQ 张柏芝32分视频神马影院 1色五月婷婷a在线视频 3344hb最新域名 和万影网类似网站 成人rct 热血长安免费观看全集苍苍影院 午夜影院会员区体验一分钟 669高清无码 成人快播新果果福利 5YYSP免费影院 黄色片oo ts龚 4433成人网 绿岛影院为您提供最新最快的快播 西瓜影音 XXo0∞↙∞ 微电影成人生生活大黄片 5sxq在线看视频 少妇白洁有声小说 欧美少妇大香蕉 伦理电影 ftp 抽插少妇视频欧美 甜伦文学88 夜色福利导航-宅男福利网址大全 水野淑惠中文字幕在线 网友自拍分类在线观看 国产迷奸迅雷下载 qinxiu13 欧美日本18禁在线观看视频(3级发帖) n4dy国货自拍 ak福利 在线 入侵者高清无码 99re66视频在线播放国产8 迅雷成人外国链接 亚洲小视频在线播放 WWW4438COm wwwx1360con ww8o6o伦理片 欧美Av3d动漫的网站 蝌蚪窝日本在线 3017最新国产自拍 白鸟樱女教师在线观看 RCT-230 mp4 韩国演艺圈1313在线观看 色涩导航 色欲影视色天天视频 西川结衣多少钱可以玩 ONSD783 avmaO88 艾迪醉地祥仔 悠悠踩踏 嫂子网 超碰视频我的 xxx小清新影院 丁香婷婷校园小说网 方菅理伦 渡濑晶皮革捆绑 风间由美色诱电影 春暖花开手机新版入口2018 大婊哥资源群 大神夜店搭讪极品美女带回高级寓所阳台一直干到莎发 经典乘骑姿势狂操 强奸乱伦狠狠爱电影 青娱乐kk4d 男子日逼视频xxx 天天操天干天干天天日天天影院 涩涩91 黑色性交视频特级片人与兽性交特级片 看看福利视频92 波儿影院 大奶肥婆自拍偷拍图片 我下载可以播放的性爱视频,不是白的文库 日本美女内射高清视频 祥仔视觉av大片合集 【幼女】可爱小幼女逼毛没齐被一圈男人凌辱轮姦饮尿中出凌 27福利午夜福利视频 一发失误动漫无修全集在线观看 亚洲在线XOⅩO日本在线 一本到影院网 上原亚衣在线高清 茉哥影视 ftp 湿妹影院破解vip 日本动漫 搜一搜 国产在线服了 神马影院三级片 magnet zipai国模 菲菲视频爱俺要射 欧美兽交在线高清视频 动态性爱视频观看 亚洲阿v天堂网2018无码 舔阴资源 mp4 理论片左线212无遮挡 琪琪影院77lcd 佐佐木明希未亡人在线视频 8x8x8x成人网华人 多人做人爱的视频图片 叶子楣三级末删视频 草他妈比视频 五五电影院伦理片 日本中文字幕 国产自拍 影音 干妹妹高清在线影院 啪啪影院性交视频 国产剧情精品小视频 【欧美性爱-日本AV不卡在线观看_不卡的无码高清的av_日本老人av在线观看】htt 中国是一级毛片 天天看天天干天天啪 一级特黄特级黄色男女裸体性交毛片 仔仔网伦理 小鸡巴在线 伦理聚合111pdy shanrnxingjiaoshipin 磁力链 好莱坞女明星 国产女和狗迅雷 酒色网啪啪 我要爱久久影视插插 av中字有码在线观看免播放器 国产av乱视频 kkkk1112视频 微信黄群免费试看 手机在线播放黑鹰坠落h未删减版 超碰页面60down 黄页网站视频免费会员 - 百度 eee778最新网址 啪啪啪电影漫画 淫妻丝纪 成人日屄视频 涩播音频 AW影院 苍苍午夜免费观看 三级 心动女友第七天福利视频 性骚做爱吃奶视频 性春暖花开自拍新手区 性感女秘书大奶磁力链接 闲人吧大香蕉在线视频 性欧美hdee 绪川里绪无码作品电影 了解背后的我av美国电影 百姓福利导航 水原梨花加勒比影音先锋 av、367,pw www5595con 哥也射综合 秋霞网站无码片免费观看 4438x8成人全国免费视频 温婉不雅视频 ftp 丛林赤子心百度贴吧 大波女孩穿内衣视频 立场 免费在线看 外国免费影院100000 网红美女磁力链接 日韩写真福利在线视频 淫乱视频操屄 俄罗斯女人操B 做爱视频免费安装网站 人妻av视屏在线免费观看 大奶国语 magnet 人人射体内射在线视频 94福利社区会员 1769在线视频一七六 sm调教灌肠视频在线观看 五月婷婷六月合俺来也漫画 free欧美人狗互交 91路 婷婷九月丁香 超碰920在线 亚洲va天堂无码在绒 Av3847 杨幂1分11秒西瓜在线 在线福利电影字幕免费观看 不可能完成的任务H版 本多翼手机在线播放 彩画堂界限 免费黄色AV gg267c0m 91爱爱社区 99操人人操 人人妻人人超人人一本道 高清欧美16P 前田香织高清无码 合集 调教丝袜在线播放 bbi-206 se sex 
亲友恋爱中文无码 下载 明星合成稀缺资源共享最新地址 亚洲 gegese 爆乳操逼视频 超碰97大香蕉4438x saozi8伦理 sepapac 插庇 干妞样免费视频百度 五月天热巴合成视频 射寡妇影院 内射妹子免费视频 pppd481在线观看无码 欧美性爱视频熬门赌场 鸡吧操屄视频 1啊无套清晰 日本视频淫乱 偷拍舍友资源 紫禁城轶事哪里能看 VID688 中村知惠有码在线视频 日韩AV黑木真由美电影首页 喷血推荐超稀有电视房 狠狠干迅雷下载 2019亚洲国内无码偷拍视频 内射,宾馆干炮视频 丝袜少妇3p在线视频 成人伦理福利 qvrt-049磁力链接 百度日本一本道免费视 熟女人妻中出 北京屄屄屄屄屄屄屄 逼淫女视频 操逼捏奶抽插视频 国产在线做爱免费视频 草榴在线自拍 国产在线观看乳交 色欧美1314 正在播放国中女生乳房真嫩 宾馆夫妻干活偷拍视频 莉哥不雅视频下载 极品白嫩美女主播极尽诱惑,喜欢的不要错过16 破处黄色视频 一级性交做爱黄片 俺也去综合网在线视频 天天草在线观看视频 涊涊影库男女视频 988大鸡巴插美女大逼视 91巨网站 亚洲av剧情 wwwjingziwou8 一个大道伊人怡红院 琪琪电影污垢 天无日天天射久草 莲实克蕾儿在线看 丁香我网五月婷婷 外国兽交视频 免费的播放器 调教堕落中日韩美女 手机2014国产自拍 每日更新无吗自拍视频 色喜国模视频在线播放 乱交在线视频 山田直美种子 精品美女_99夜色福利 姐弟抽插拍拍免费视频 强奸内射 在线观看 鸡巴插入屁股啪啪啪 七次狼青青青最新 近亲乱伦视频 热门推荐美女动态图片高潮XX00又黄又色动态图 奇米影视网站资源 井上瞳在线播放 簧鋍片kk guochantoupai ipz-576在线播放 伦理图片视频亚洲曰本 日本女优老湿影院av成人影院 岛国种子搬运工官网 eeussa cok 超级喵喵搜索网站 亚洲医生与护士10p 抖音被封漏点的视频 4338x华人永久免费 黄色录像一级带干逼逼的一级带不穿裤子的一起带 女生寝室的娇喘声 黄色视频资源吧 黄色自慰视频打算大全 女校长得私欲 黄色ppp小视频 后入中学生操逼视频网站 女神主播脱衣热舞福利迅雷下载 gegezyzy 大香蕉狼狼日伊人 青野圣罗种子 涩涩禾 媚心主播户外搭帐篷 波多野一本道在线播放 尤酷伦理片 色色电影 ed2k 92看看c 叉牛b 色播五月亚洲综合网站 - 百度 成wwwccc977 完美看看鬼父 福利簧片 xxxvdynom [黑白中文]良太要继续偷东西 国内真实偷拍 手机在线 农民av 含羞草在线视频 国产 国产自拍偷拍在线导航 国产主播无码迅雷 国产真实换妻在线 内裤哥操S形黑丝连体服高跟巨乳的客房经理丰满大奶子齐B小短裙看着就想操 内蒙包头小骚女办公室小露脸自拍第三弹 超级艳舞 免费线上高清a片直播 免费点播大姨妈影院 松坂季実子17磁力链接 呦呦禁处 自拍偷拍福利社在线 shenaijiq 一本道黄色免费视频 亚洲熟女伦理 影院 久草色新在线影院 色喇叭超碰在线视频 欧美家庭派对磁链接magnet 国产小琴 91福利动漫 magnet 操同事的小女友爱剪辑 微拍福利伦理片 丁香五月欧洲视频播放 888kbkb 东方四虎av伊人 插萝莉影院 1315影院 久久口交插萝莉影院 神父AⅤ动漫在线观看 三级纶理片 青春草伊人免费视频6 可以播放的视频 孕交 8aay路com 青青草福利视频播放 成人MV无码女优 hulisecon 欧美做爱色图30p 美女内射套图 聊斋之淫荡狐仙 新色猫咪导航网站 有一个女模特叫马什么 美屄图片网 张筱雨的淫荡 诱惑人体艺术一级图片 姐姐教我射 美国十次啦就爱干处女 欧洲下体艺术照 人体艺术日本电脑 骚逼做爱故事 女色幼rmvb 性爱乱p激情 欧美淫sex 长篇幼幼小说 影音先锋假阳具插美女 淫学生做爱图 qvod插菊花网 大陆顶级成人a片 裸体静相 美少女乳房阳部人体艺术图