diff --git a/.agents/AGENTS.md b/.agents/AGENTS.md index 353ea2e..737dfb6 100644 --- a/.agents/AGENTS.md +++ b/.agents/AGENTS.md @@ -28,6 +28,11 @@ server. - After generating the tool, call the tool to actually test it. - When testing by calling the tool, prefer to not set the `jsRender` parameter. +# Releasing + +- When bumping server versions, make sure all instances of the old server version are updated. These + are in `package.json`, `server.json` and possibly in other files. + # Documentation - Update readme with new tool, toolset and parameter information. diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 0000000..7709173 --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,31 @@ +name: Pull request + +on: + pull_request: + types: [opened, synchronize, reopened] + +concurrency: + group: pr-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + test: + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - name: Checkout + uses: actions/checkout@v5 + + - name: Setup Node.js + uses: actions/setup-node@v5 + with: + node-version: 'lts/*' + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Run unit tests + run: npm test diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index eb8bc33..74dca4f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,56 +11,7 @@ concurrency: cancel-in-progress: false jobs: - semantic-release: - runs-on: ubuntu-latest - outputs: - released: ${{ steps.release.outputs.released }} - tag: ${{ steps.release.outputs.tag }} - permissions: - contents: write - issues: write - pull-requests: write - id-token: write - - steps: - - name: Checkout - uses: actions/checkout@v5 - with: - fetch-depth: 0 - persist-credentials: true - - - name: Setup Node.js - uses: actions/setup-node@v5 - with: - node-version: 'lts/*' - registry-url: 'https://registry.npmjs.org' - - - name: Install dependencies - 
run: npm ci - - - name: Build - run: npm run build --if-present - - - name: Semantic release - id: release - env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - run: | - set -e - BEFORE=$(git describe --tags --abbrev=0 2>/dev/null || true) - npx semantic-release - AFTER=$(git describe --tags --abbrev=0 2>/dev/null || true) - if [ -n "$AFTER" ] && [ "$BEFORE" != "$AFTER" ]; then - echo "released=true" >> "$GITHUB_OUTPUT" - echo "tag=$AFTER" >> "$GITHUB_OUTPUT" - else - echo "released=false" >> "$GITHUB_OUTPUT" - fi - mcp-publish: - needs: semantic-release - if: needs.semantic-release.outputs.released == 'true' runs-on: ubuntu-latest permissions: id-token: write @@ -70,7 +21,7 @@ jobs: - name: Checkout tag uses: actions/checkout@v5 with: - ref: ${{ needs.semantic-release.outputs.tag }} + ref: main - name: Setup Node.js uses: actions/setup-node@v5 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 854139a..2b5fa63 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1 +1,28 @@ -# Contributing +# Publishing + +When After merging changes to the server, the following steps need to be done: + +1. Server version bump (major/minor/patch) + +2. Publish to MCP registry. + +## Server version bump + +Publishing is done manually through a developer machine. This enabled bundling multiple +features/fixes under one release. + +When you want to publish a new version: + +- Pull `main` locally. + +- Bump the server version numbers in the following files: `package.json`, `server.json`. Note that + there may be more files where the version number + +- Choose between major, minor or patch bump accordingly. + +- Run `npm publish`. This command should instruct you how to authenticate with NPM. 
The full command + should look like this: + +``` +NPM_CONFIG__AUTHTOKEN=npm publish --access public +``` diff --git a/README.md b/README.md index 877af43..7b7ee02 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ services, streamlining access to our tools and capabilities. 1. Clone this repository: ``` -git clone https://github.com/Decodo/decodo-mcp-server +git clone https://github.com/Decodo/mcp-server ``` 2. Run the following commands in the terminal: @@ -151,31 +151,31 @@ The server exposes the following tools: | `scrape_as_markdown` | Scrapes any target URL, expects a URL to be given via prompt. Returns results in Markdown. | Scrape peacock.com from a US IP address and tell me the pricing. | | `screenshot` | Captures a screenshot of any webpage and returns it as a PNG image. | Take a screenshot of github.com from a US IP address. | | `google_search` | Scrapes Google Search for a given query, and returns parsed results. | Scrape Google Search for shoes and tell me the top position. | -| `google_ads` | Scrapes Google Ads search results. | Scrape Google Ads for laptop and show me the top ads. | -| `google_lens` | Scrapes Google Lens image search results. | Search Google Lens for this image: https://example.com/image.jpg | -| `google_ai_mode` | Scrapes Google AI Mode (Search with AI) results. | Ask Google AI Mode: What are the top three dog breeds? | +| `google_ads` | Scrapes Google Ads search results. | Scrape Google Ads for laptop and show me the top ads. | +| `google_lens` | Scrapes Google Lens image search results. | Search Google Lens for this image: https://example.com/image.jpg | +| `google_ai_mode` | Scrapes Google AI Mode (Search with AI) results. | Ask Google AI Mode: What are the top three dog breeds? | | `google_travel_hotels` | Scrapes Google Travel Hotels search results. | Search Google Travel Hotels for hotels in Paris. | | `amazon_search` | Scrapes Amazon Search for a given query, and returns parsed results. 
| Scrape Amazon Search for wireless keyboard. | -| `amazon_product` | Scrapes Amazon Product page. | Scrape Amazon product B09H74FXNW and show me the details. | -| `amazon_pricing` | Scrapes Amazon Product pricing information. | Get pricing for Amazon product B09H74FXNW. | -| `amazon_sellers` | Scrapes Amazon Seller information. | Get information about Amazon seller A1R0Z7FJGTKESH. | -| `amazon_bestsellers` | Scrapes Amazon Bestsellers list. | Show me Amazon bestsellers in electronics. | +| `amazon_product` | Scrapes Amazon Product page. | Scrape Amazon product B09H74FXNW and show me the details. | +| `amazon_pricing` | Scrapes Amazon Product pricing information. | Get pricing for Amazon product B09H74FXNW. | +| `amazon_sellers` | Scrapes Amazon Seller information. | Get information about Amazon seller A1R0Z7FJGTKESH. | +| `amazon_bestsellers` | Scrapes Amazon Bestsellers list. | Show me Amazon bestsellers in electronics. | | `walmart_search` | Scrapes Walmart Search for a given query, and returns parsed results. | Scrape Walmart Search for camping tent. | -| `walmart_product` | Scrapes Walmart Product page. | Scrape Walmart product 15296401808. | +| `walmart_product` | Scrapes Walmart Product page. | Scrape Walmart product 15296401808. | | `target_search` | Scrapes Target Search for a given query, and returns parsed results. | Scrape Target Search for kitchen appliances. | -| `target_product` | Scrapes Target Product page. | Scrape Target product 92186007. | +| `target_product` | Scrapes Target Product page. | Scrape Target product 92186007. | | `tiktok_post` | Scrapes a TikTok post URL for structured data (e.g. engagement, caption, hashtags). | Scrape this TikTok post: https://www.tiktok.com/@nba/video/7393013274725403950 | | `tiktok_shop_search` | Scrapes TikTok Shop Search for a given query, and returns parsed results. | Scrape TikTok Shop Search for phone cases. | | `tiktok_shop_product` | Scrapes TikTok Shop Product page. 
| Scrape TikTok Shop product 1731541214379741272. | | `tiktok_shop_url` | Scrapes TikTok Shop page by URL. | Scrape this TikTok Shop URL: https://www.tiktok.com/shop/s?q=HEADPHONES | | `youtube_metadata` | Scrapes YouTube video metadata. | Get metadata for YouTube video dFu9aKJoqGg. | -| `youtube_channel` | Scrapes YouTube channel videos. | Scrape YouTube channel @decodo_official. | +| `youtube_channel` | Scrapes YouTube channel videos. | Scrape YouTube channel @decodo_official. | | `youtube_subtitles` | Scrapes YouTube video subtitles. | Get subtitles for YouTube video L8zSWbQN-v8. | | `youtube_search` | Search YouTube videos. | Search YouTube for "How to care for chinchillas". | | `reddit_post` | Scrapes a specific Reddit post. | Scrape the following Reddit post: https://www.reddit.com/r/horseracing/comments/1nsrn3/ | -| `reddit_subreddit` | Scrapes Reddit subreddit results. | Scrape the top 5 posts on r/Python this week. | +| `reddit_subreddit` | Scrapes Reddit subreddit results. | Scrape the top 5 posts on r/Python this week. | | `reddit_user` | Scrapes a Reddit user profile and their posts/comments. | Scrape this Reddit user: https://www.reddit.com/user/IWasRightOnce/ | -| `bing_search` | Scrapes Bing Search results. | Search Bing for laptop reviews. | +| `bing_search` | Scrapes Bing Search results. | Search Bing for laptop reviews. | | `chatgpt` | Search and interact with ChatGPT for AI-powered responses and conversations. | Ask ChatGPT to explain quantum computing in simple terms. | | `perplexity` | Search and interact with Perplexity for AI-powered responses and conversations. | Ask Perplexity what the latest trends in web development are. 
| diff --git a/package-lock.json b/package-lock.json index 69f6190..cd01d14 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "license": "ISC", "dependencies": { "@modelcontextprotocol/sdk": "^1.12.3", @@ -599,6 +599,70 @@ "node": ">=12" } }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "license": "MIT", + 
"optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/darwin-arm64": { "version": "0.27.7", "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", @@ -615,6 +679,342 @@ "node": ">=18" } }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": 
"0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": 
"0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", @@ -2522,46 +2922,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@jest/transform": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.0.0.tgz", - "integrity": "sha512-8xhpsCGYJsUjqpJOgLyMkeOSSlhqggFZEWAnZquBsvATtueoEs7CkMRxOUmJliF3E5x+mXmZ7gEEsHank029Og==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@babel/core": "^7.27.4", - "@jest/types": "30.0.0", - "@jridgewell/trace-mapping": "^0.3.25", - "babel-plugin-istanbul": "^7.0.0", - "chalk": "^4.1.2", - "convert-source-map": "^2.0.0", - "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.11", - "jest-haste-map": "30.0.0", - "jest-regex-util": "30.0.0", - "jest-util": "30.0.0", - "micromatch": "^4.0.8", - "pirates": "^4.0.7", - "slash": "^3.0.0", - "write-file-atomic": "^5.0.1" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - } - }, - "node_modules/@jest/transform/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, "node_modules/@jest/types": { "version": "30.0.0", "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.0.0.tgz", @@ -5196,14 +5556,6 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@ungap/structured-clone": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", - "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/accepts": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", @@ -5453,63 +5805,6 @@ "proxy-from-env": "^2.1.0" } }, - "node_modules/babel-jest": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.0.0.tgz", - "integrity": "sha512-JQ0DhdFjODbSawDf0026uZuwaqfKkQzk+9mwWkq2XkKFIaMhFVOxlVmbFCOnnC76jATdxrff3IiUAvOAJec6tw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@jest/transform": "30.0.0", - "@types/babel__core": "^7.20.5", - "babel-plugin-istanbul": "^7.0.0", - "babel-preset-jest": "30.0.0", - "chalk": "^4.1.2", - "graceful-fs": "^4.2.11", - "slash": "^3.0.0" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.11.0" - } - }, - "node_modules/babel-plugin-istanbul": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.0.tgz", - "integrity": 
"sha512-C5OzENSx/A+gt7t4VH1I2XsflxyPUmXRFPKBxt33xncdOmq7oROVM3bZv9Ysjjkv8OJYDMa+tKuKMvqU/H3xdw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.3", - "istanbul-lib-instrument": "^6.0.2", - "test-exclude": "^6.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/babel-plugin-jest-hoist": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.0.0.tgz", - "integrity": "sha512-DSRm+US/FCB4xPDD6Rnslb6PAF9Bej1DZ+1u4aTiqJnk7ZX12eHsnDiIOqjGvITCq+u6wLqUhgS+faCNbVY8+g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.27.3", - "@types/babel__core": "^7.20.5" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - } - }, "node_modules/babel-preset-current-node-syntax": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", @@ -5536,24 +5831,6 @@ "@babel/core": "^7.0.0" } }, - "node_modules/babel-preset-jest": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.0.0.tgz", - "integrity": "sha512-hgEuu/W7gk8QOWUA9+m3Zk+WpGvKc1Egp6rFQEfYxEoM9Fk/q8nuTXNL65OkhwGrTApauEGgakOoWVXj+UfhKw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "babel-plugin-jest-hoist": "30.0.0", - "babel-preset-current-node-syntax": "^1.1.0" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.11.0" - } - }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -9821,32 +10098,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-haste-map": { - 
"version": "30.0.0", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.0.0.tgz", - "integrity": "sha512-p4bXAhXTawTsADgQgTpbymdLaTyPW1xWNu1oIGG7/N3LIAbZVkH2JMJqS8/IUcnGR8Kc7WFE+vWbJvsqGCWZXw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@jest/types": "30.0.0", - "@types/node": "*", - "anymatch": "^3.1.3", - "fb-watchman": "^2.0.2", - "graceful-fs": "^4.2.11", - "jest-regex-util": "30.0.0", - "jest-util": "30.0.0", - "jest-worker": "30.0.0", - "micromatch": "^4.0.8", - "walker": "^1.0.8" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - }, - "optionalDependencies": { - "fsevents": "^2.3.3" - } - }, "node_modules/jest-leak-detector": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", @@ -11369,24 +11620,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-worker": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.0.0.tgz", - "integrity": "sha512-VZvxfWIybIvwK8N/Bsfe43LfQgd/rD0c4h5nLUx78CAqPxIQcW2qDjsVAC53iUR8yxzFIeCFFvWOh8en8hGzdg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@types/node": "*", - "@ungap/structured-clone": "^1.3.0", - "jest-util": "30.0.0", - "merge-stream": "^2.0.0", - "supports-color": "^8.1.1" - }, - "engines": { - "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" - } - }, "node_modules/jest/node_modules/@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -17259,21 +17492,6 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, - "node_modules/write-file-atomic": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", - "integrity": 
"sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/ws": { "version": "8.20.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", diff --git a/package.json b/package.json index 49d5800..f2ec136 100644 --- a/package.json +++ b/package.json @@ -1,15 +1,16 @@ { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "description": "Decodo MCP Server", "bin": { "decodo-mcp": "./build/index.js" }, "main": "index.js", - "mcpName": "io.github.Decodo/mcp-web-scraper", + "mcpName": "io.github.Decodo/mcp-server", "scripts": { "build": "tsc && chmod 755 build/index.js build/server.js", - "dev": "nodemon --watch src -e ts --exec 'npm run build'", + "dev:http": "nodemon --watch src -e ts --exec 'tsx src/server.ts'", + "dev:stdio": "nodemon --watch src -e ts --exec 'npm run build'", "start": "node build/server.js", "inspect": "mcp-inspector", "test": "jest", diff --git a/server.json b/server.json index 9ce1246..5d2b81a 100644 --- a/server.json +++ b/server.json @@ -1,19 +1,19 @@ { "$schema": "https://static.modelcontextprotocol.io/schemas/2025-09-16/server.schema.json", - "name": "io.github.Decodo/mcp-web-scraper", + "name": "io.github.Decodo/mcp-server", "description": "Enable your AI agents to scrape and parse web content dynamically, including geo-restricted sites", "status": "active", "repository": { - "url": "https://github.com/Decodo/mcp-web-scraper", + "url": "https://github.com/Decodo/mcp-server", "source": "github" }, - "version": "1.2.0", + "version": "1.2.1", "packages": [ { "registryType": "npm", "registryBaseUrl": "https://registry.npmjs.org", "identifier": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "transport": { "type": "stdio" } diff --git 
a/src/clients/__tests__/scraper-api-client.test.ts b/src/clients/__tests__/scraper-api-client.test.ts index c8025b7..c6963f1 100644 --- a/src/clients/__tests__/scraper-api-client.test.ts +++ b/src/clients/__tests__/scraper-api-client.test.ts @@ -7,7 +7,7 @@ jest.mock('axios'); const mockedAxios = axios as jest.Mocked; -const client = new ScraperApiClient(); +const client = new ScraperApiClient({ maxRetries: 1, delayMs: 0 }); const defaultArgs = { auth: 'dGVzdDp0ZXN0', scrapingParams: { url: 'https://example.com' }, @@ -48,6 +48,14 @@ beforeEach(() => { describe('ScraperApiClient', () => { describe('scrape - error handling', () => { + beforeEach(() => { + jest.spyOn(console, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + jest.mocked(console.error).mockRestore(); + }); + it('throws friendly message on 401', async () => { mockedAxios.request.mockRejectedValue( createAxiosError({ status: 401, message: 'Unauthorized' }) diff --git a/src/clients/retry.ts b/src/clients/retry.ts new file mode 100644 index 0000000..c256af8 --- /dev/null +++ b/src/clients/retry.ts @@ -0,0 +1,52 @@ +import { AxiosError } from 'axios'; + +export const MAX_RETRIES = Math.max(0, parseInt(process.env.MAX_RETRIES ?? '2', 10) || 2); +export const RETRYABLE_STATUS_CODES = new Set([429, 502, 503, 504]); +export const RETRYABLE_NETWORK_CODES = new Set([ + 'ECONNRESET', + 'ETIMEDOUT', + 'ECONNABORTED', + 'ENOTFOUND', +]); +export const WAITING_INITIAL_DELAY_MS = 3000; +export const WAITING_INTERVAL_MS = 5000; + +export const BASE_RETRY_DELAY_MS = 1000; + +export const isRetryable = (error: AxiosError): boolean => { + if (error.response) { + return RETRYABLE_STATUS_CODES.has(error.response.status); + } + return RETRYABLE_NETWORK_CODES.has(error.code ?? 
''); +}; + +export const getRetryDelay = ({ + attempt, + error, + baseDelayMs = BASE_RETRY_DELAY_MS, +}: { + attempt: number; + error: AxiosError; + baseDelayMs?: number; +}): number => { + if (error.response?.status === 429) { + const retryAfter = error.response.headers['retry-after']; + if (retryAfter) { + const seconds = Number(retryAfter); + if (!isNaN(seconds)) { + return seconds * 1000; + } + + const date = Date.parse(retryAfter); + if (!isNaN(date)) { + return Math.max(0, date - Date.now()); + } + } + } + + const baseMs = baseDelayMs * Math.pow(2, attempt); + const jitterMs = Math.random() * 500; + return baseMs + jitterMs; +}; + +export const sleep = (ms: number): Promise => new Promise(resolve => setTimeout(resolve, ms)); diff --git a/src/clients/scraper-api-client.ts b/src/clients/scraper-api-client.ts index 13733a4..9ad68f4 100644 --- a/src/clients/scraper-api-client.ts +++ b/src/clients/scraper-api-client.ts @@ -1,42 +1,33 @@ -import axios, { AxiosError, AxiosResponse } from 'axios'; +import axios, { AxiosResponse } from 'axios'; import { ScraperApiResponseData } from './types'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; +import { ProgressNotifier, ProgressExtra } from '../utils'; +import { + BASE_RETRY_DELAY_MS, + getRetryDelay, + isRetryable, + MAX_RETRIES, + sleep, + WAITING_INITIAL_DELAY_MS, + WAITING_INTERVAL_MS, +} from './retry'; -const MAX_RETRIES = Math.max(0, parseInt(process.env.MAX_RETRIES ?? '2', 10) || 2); -const RETRYABLE_STATUS_CODES = new Set([429, 502, 503, 504]); -const RETRYABLE_NETWORK_CODES = new Set(['ECONNRESET', 'ETIMEDOUT', 'ECONNABORTED', 'ENOTFOUND']); +export class ScraperApiClient { + maxRetries: number; -const isRetryable = (error: AxiosError): boolean => { - if (error.response) { - return RETRYABLE_STATUS_CODES.has(error.response.status); - } - return RETRYABLE_NETWORK_CODES.has(error.code ?? 
''); -}; - -const getRetryDelay = (attempt: number, error: AxiosError): number => { - if (error.response?.status === 429) { - const retryAfter = error.response.headers['retry-after']; - if (retryAfter) { - const seconds = Number(retryAfter); - if (!isNaN(seconds)) { - return seconds * 1000; - } + delayMs: number; - const date = Date.parse(retryAfter); - if (!isNaN(date)) { - return Math.max(0, date - Date.now()); - } - } + constructor({ + maxRetries = MAX_RETRIES, + delayMs = BASE_RETRY_DELAY_MS, + }: { + maxRetries?: number; + delayMs?: number; + } = {}) { + this.maxRetries = maxRetries; + this.delayMs = delayMs; } - const baseMs = 1000 * Math.pow(2, attempt); - const jitterMs = Math.random() * 500; - return baseMs + jitterMs; -}; - -const sleep = (ms: number): Promise => new Promise(resolve => setTimeout(resolve, ms)); - -export class ScraperApiClient { transformScrapingParams = ({ scrapingParams, }: { @@ -60,68 +51,86 @@ export class ScraperApiClient { scrape = async ({ auth, scrapingParams, + extra, }: { auth: string; scrapingParams: ScrapingMCPParams; + extra?: ProgressExtra; }) => { - const transformedParams = this.transformScrapingParams({ scrapingParams }); - - const url = process.env.DECODO_SAPI_HOST || 'https://scraper-api.decodo.com'; - - let lastError: unknown; - - for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { - try { - const res = await axios.request>({ - url: `${url}/v2/scrape`, - method: 'POST', - headers: { - authorization: `Basic ${auth}`, - 'x-integration': 'mcp', - }, - timeout: 180000, - data: { - ...transformedParams, - }, - }); - - return this.transformResponse({ res }); - } catch (error) { - lastError = error; - - if (attempt < MAX_RETRIES && axios.isAxiosError(error) && isRetryable(error)) { - const delayMs = getRetryDelay(attempt, error); - const reason = error.response - ? 
`status ${error.response.status}` - : `network error ${error.code}`; - - console.error( - `[scraper-api-client] Retry ${ - attempt + 1 - }/${MAX_RETRIES} after ${reason}, waiting ${Math.round(delayMs)}ms` - ); - - await sleep(delayMs); - continue; - } + const notifier = new ProgressNotifier(extra); - break; - } - } + try { + await notifier.notify('Submitting request to Decodo API...', 0, 1); - if (axios.isAxiosError(lastError)) { - const status = lastError.response?.status; - let errorMessage = lastError.response?.data?.message ?? lastError.message; + notifier.startWaitingNotifications(WAITING_INITIAL_DELAY_MS, WAITING_INTERVAL_MS); - if (status === 401) { - errorMessage = 'Authentication failed.'; - } - if (status === 429) { - errorMessage = JSON.stringify(lastError.response?.data); + const transformedParams = this.transformScrapingParams({ scrapingParams }); + const url = process.env.DECODO_SAPI_HOST || 'https://scraper-api.decodo.com'; + + let lastError: unknown; + + for (let attempt = 0; attempt <= this.maxRetries; attempt++) { + try { + const res = await axios.request>({ + url: `${url}/v2/scrape`, + method: 'POST', + headers: { + authorization: `Basic ${auth}`, + 'x-integration': 'mcp', + }, + timeout: 180000, + data: { + ...transformedParams, + }, + }); + + notifier.stopWaitingNotifications(); + + await notifier.notify('Processing response...', 0.9, 1); + + return this.transformResponse({ res }); + } catch (error) { + lastError = error; + + if (attempt < this.maxRetries && axios.isAxiosError(error) && isRetryable(error)) { + // Delay honors Retry-After on 429, else exponential backoff with jitter. + const delayMs = getRetryDelay({ attempt, error, baseDelayMs: this.delayMs }); + const reason = error.response + ? 
`status ${error.response.status}` + : `network error ${error.code}`; + + console.error( + `[scraper-api-client] Retry ${ + attempt + 1 + }/${this.maxRetries} after ${reason}, waiting ${Math.round(delayMs)}ms` + ); + + await notifier.notify(`Retrying (${attempt + 1}/${this.maxRetries})...`, 0.1, 1); + + await sleep(delayMs); + continue; + } + + break; + } + } - throw new Error(`Scraper API request failed (${status}): ${errorMessage}`); + if (axios.isAxiosError(lastError)) { + const status = lastError.response?.status; + let errorMessage = lastError.response?.data?.message ?? lastError.message; + + if (status === 401) { + errorMessage = 'Authentication failed.'; + } + if (status === 429) { + errorMessage = JSON.stringify(lastError.response?.data); + } + + throw new Error(`Scraper API request failed (${status}): ${errorMessage}`); + } + throw lastError; + } finally { + notifier.stopWaitingNotifications(); } - throw lastError; }; } diff --git a/src/server.ts b/src/server.ts index 3a2ead4..1f650e2 100644 --- a/src/server.ts +++ b/src/server.ts @@ -35,7 +35,7 @@ app.post('/mcp', async (req, res) => { const transport = new StreamableHTTPServerTransport({ sessionIdGenerator: undefined, - enableJsonResponse: true, + enableJsonResponse: false, }); res.on('close', () => { diff --git a/src/server/__tests__/server-registration.test.ts b/src/server/__tests__/server-registration.test.ts index 5dac45b..6c78724 100644 --- a/src/server/__tests__/server-registration.test.ts +++ b/src/server/__tests__/server-registration.test.ts @@ -21,7 +21,7 @@ describe('Server registration', () => { expect(McpServer).toHaveBeenCalledWith({ name: 'decodo', - version: '1.2.0', + version: '1.2.1', }); }); diff --git a/src/server/sapi-base-server.ts b/src/server/sapi-base-server.ts index fd112cd..79c6164 100644 --- a/src/server/sapi-base-server.ts +++ b/src/server/sapi-base-server.ts @@ -1,6 +1,7 @@ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; import { StdioServerTransport } from 
'@modelcontextprotocol/sdk/server/stdio.js'; import { ScraperApiClient } from '../clients/scraper-api-client'; +import { PACKAGE_VERSION } from '../version'; import { AmazonSearchTool, AmazonProductTool, @@ -47,9 +48,9 @@ export class ScraperAPIBaseServer { constructor({ auth, toolsets = [] }: { auth: string; toolsets: TOOLSET[] }) { this.server = new McpServer({ name: 'decodo', - version: '1.2.0', + version: PACKAGE_VERSION, }); - this.sapiClient = new ScraperApiClient(); + this.sapiClient = new ScraperApiClient({}); this.auth = auth; diff --git a/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts b/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts index 8e481d7..0d5d00c 100644 --- a/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts +++ b/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDomain = z .string() @@ -37,14 +38,14 @@ export class AmazonBestsellersTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_BESTSELLERS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/amazon-pricing/amazon-pricing-tool.ts b/src/tools/amazon-pricing/amazon-pricing-tool.ts index fd0a24e..9b8905b 100644 --- a/src/tools/amazon-pricing/amazon-pricing-tool.ts +++ b/src/tools/amazon-pricing/amazon-pricing-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, 
ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -51,14 +51,14 @@ export class AmazonPricingTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_PRICING, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-product/amazon-product-tool.ts b/src/tools/amazon-product/amazon-product-tool.ts index 401dcdc..eb714c2 100644 --- a/src/tools/amazon-product/amazon-product-tool.ts +++ b/src/tools/amazon-product/amazon-product-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -45,14 +45,14 @@ export class AmazonProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_PRODUCT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = 
await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-search/amazon-search-tool.ts b/src/tools/amazon-search/amazon-search-tool.ts index 7399e1a..9030059 100644 --- a/src/tools/amazon-search/amazon-search-tool.ts +++ b/src/tools/amazon-search/amazon-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -46,14 +46,14 @@ export class AmazonSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-sellers/amazon-sellers-tool.ts b/src/tools/amazon-sellers/amazon-sellers-tool.ts index 3a7dbac..bec53b5 100644 --- a/src/tools/amazon-sellers/amazon-sellers-tool.ts +++ b/src/tools/amazon-sellers/amazon-sellers-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDomain = z .string() @@ -38,14 +39,14 @@ export class AmazonSellersTool extends Tool { 
openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_SELLERS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/bing-search/bing-search-tool.ts b/src/tools/bing-search/bing-search-tool.ts index dd03ac8..fe5a6f6 100644 --- a/src/tools/bing-search/bing-search-tool.ts +++ b/src/tools/bing-search/bing-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -47,14 +47,14 @@ export class BingSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.BING_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/chatgpt/chatgpt-tool.ts b/src/tools/chatgpt/chatgpt-tool.ts index b7416bb..45931cf 100644 --- a/src/tools/chatgpt/chatgpt-tool.ts +++ b/src/tools/chatgpt/chatgpt-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; 
import { zodGeo } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ChatGPTTool extends Tool { toolset = TOOLSET.AI; @@ -26,14 +27,14 @@ export class ChatGPTTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.CHATGPT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-ads/google-ads-tool.ts b/src/tools/google-ads/google-ads-tool.ts index 6cdb9ca..b7f6617 100644 --- a/src/tools/google-ads/google-ads-tool.ts +++ b/src/tools/google-ads/google-ads-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -41,14 +41,14 @@ export class GoogleAdsTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_ADS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git 
a/src/tools/google-ai-mode/google-ai-mode-tool.ts b/src/tools/google-ai-mode/google-ai-mode-tool.ts index b773f12..a9efe73 100644 --- a/src/tools/google-ai-mode/google-ai-mode-tool.ts +++ b/src/tools/google-ai-mode/google-ai-mode-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodGeo = z .string() @@ -31,14 +32,14 @@ export class GoogleAiModeTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_AI_MODE, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/google-lens/google-lens-tool.ts b/src/tools/google-lens/google-lens-tool.ts index 7e39add..68c0cc7 100644 --- a/src/tools/google-lens/google-lens-tool.ts +++ b/src/tools/google-lens/google-lens-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -33,14 +33,14 @@ export class GoogleLensTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: 
SCRAPER_API_TARGETS.GOOGLE_LENS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-search/google-search-tool.ts b/src/tools/google-search/google-search-tool.ts index 13dcfee..0095d07 100644 --- a/src/tools/google-search/google-search-tool.ts +++ b/src/tools/google-search/google-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -41,14 +41,14 @@ export class GoogleSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-travel-hotels/google-travel-hotels-tool.ts b/src/tools/google-travel-hotels/google-travel-hotels-tool.ts index 99d3620..341f31e 100644 --- a/src/tools/google-travel-hotels/google-travel-hotels-tool.ts +++ b/src/tools/google-travel-hotels/google-travel-hotels-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType, zodLocale } from 
'../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodPageFrom = z .number() @@ -33,14 +34,14 @@ export class GoogleTravelHotelsTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_TRAVEL_HOTELS, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/perplexity/perplexity-tool.ts b/src/tools/perplexity/perplexity-tool.ts index 0b9cdbb..a3dbdf3 100644 --- a/src/tools/perplexity/perplexity-tool.ts +++ b/src/tools/perplexity/perplexity-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodGeo } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class PerplexityTool extends Tool { toolset = TOOLSET.AI; @@ -26,14 +27,14 @@ export class PerplexityTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.PERPLEXITY, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-post/reddit-post-tool.ts b/src/tools/reddit-post/reddit-post-tool.ts index 3a8d06a..d81f3c9 100644 --- a/src/tools/reddit-post/reddit-post-tool.ts +++ 
b/src/tools/reddit-post/reddit-post-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditPostTool extends Tool { @@ -34,13 +34,13 @@ export class RedditPostTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_POST, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-subreddit/reddit-subreddit-tool.ts b/src/tools/reddit-subreddit/reddit-subreddit-tool.ts index e2756fb..51a7d6a 100644 --- a/src/tools/reddit-subreddit/reddit-subreddit-tool.ts +++ b/src/tools/reddit-subreddit/reddit-subreddit-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditSubredditTool extends Tool { @@ -30,13 +30,13 @@ export class RedditSubredditTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_SUBREDDIT, } satisfies ScraperAPIParams; - const { data } = await 
sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-user/reddit-user-tool.ts b/src/tools/reddit-user/reddit-user-tool.ts index 2051daf..0a370d1 100644 --- a/src/tools/reddit-user/reddit-user-tool.ts +++ b/src/tools/reddit-user/reddit-user-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditUserTool extends Tool { @@ -36,13 +36,13 @@ export class RedditUserTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_USER, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts b/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts index 3c43027..25c8e25 100644 --- a/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts +++ b/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts @@ -4,6 +4,7 @@ import { NodeHtmlMarkdown } from 'node-html-markdown'; import { zodGeo, zodJsRender, zodLocale, zodTokenLimit } from '../../zod/zod-types'; import { TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ScrapeAsMarkdownTool extends Tool { toolset = TOOLSET.WEB; 
@@ -56,8 +57,8 @@ export class ScrapeAsMarkdownTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { - const { data } = await sapiClient.scrape({ auth, scrapingParams }); + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { + const { data } = await sapiClient.scrape({ auth, scrapingParams, extra }); const { data: markdown, isTruncated } = this.transformResponse({ data, diff --git a/src/tools/screenshot/screenshot-tool.ts b/src/tools/screenshot/screenshot-tool.ts index 52a849a..2f1846c 100644 --- a/src/tools/screenshot/screenshot-tool.ts +++ b/src/tools/screenshot/screenshot-tool.ts @@ -3,6 +3,7 @@ import { ScrapingMCPParams } from 'types'; import { zodGeo } from '../../zod/zod-types'; import { TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ScreenshotTool extends Tool { toolset = TOOLSET.WEB; @@ -25,10 +26,11 @@ export class ScreenshotTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const { data } = await sapiClient.scrape({ auth, scrapingParams: { ...scrapingParams, headless: 'png' }, + extra, }); return { diff --git a/src/tools/target-product/target-product-tool.ts b/src/tools/target-product/target-product-tool.ts index f150c11..620a573 100644 --- a/src/tools/target-product/target-product-tool.ts +++ b/src/tools/target-product/target-product-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDeliveryZip = z.string().describe('ZIP code for delivery location').optional(); @@ -32,7 +33,7 @@ export class TargetProductTool 
extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { headless: 'html', ...scrapingParams, @@ -40,7 +41,7 @@ export class TargetProductTool extends Tool { parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/target-search/target-search-tool.ts b/src/tools/target-search/target-search-tool.ts index b1ee098..a0e3226 100644 --- a/src/tools/target-search/target-search-tool.ts +++ b/src/tools/target-search/target-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -39,7 +39,7 @@ export class TargetSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { headless: 'html', ...scrapingParams, @@ -47,7 +47,7 @@ export class TargetSearchTool extends Tool { markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-post/tiktok-post-tool.ts b/src/tools/tiktok-post/tiktok-post-tool.ts index 26f78d8..adda1d5 100644 --- a/src/tools/tiktok-post/tiktok-post-tool.ts +++ b/src/tools/tiktok-post/tiktok-post-tool.ts @@ -3,6 +3,7 @@ 
import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodXhr } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokPostTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -28,13 +29,13 @@ export class TiktokPostTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_POST, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts b/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts index 6d7caf6..387d378 100644 --- a/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts +++ b/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType, zodCountry } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokShopProductTool extends Tool { toolset = TOOLSET.ECOMMERCE; @@ -27,14 +28,14 @@ export class TiktokShopProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_PRODUCT, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + 
const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts b/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts index 5184557..8cf0db5 100644 --- a/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts +++ b/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodJsRender, zodCountry, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -35,14 +35,14 @@ export class TiktokShopSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_SEARCH, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts b/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts index c099ac3..4deb13c 100644 --- a/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts +++ b/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokShopUrlTool extends Tool { toolset = 
TOOLSET.ECOMMERCE; @@ -25,13 +26,13 @@ export class TiktokShopUrlTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_URL, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/walmart-product/walmart-product-tool.ts b/src/tools/walmart-product/walmart-product-tool.ts index 9d5e952..5428094 100644 --- a/src/tools/walmart-product/walmart-product-tool.ts +++ b/src/tools/walmart-product/walmart-product-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -44,14 +44,14 @@ export class WalmartProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.WALMART_PRODUCT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/walmart-search/walmart-search-tool.ts b/src/tools/walmart-search/walmart-search-tool.ts index facd693..8041a15 100644 --- a/src/tools/walmart-search/walmart-search-tool.ts +++ b/src/tools/walmart-search/walmart-search-tool.ts @@ -1,7 
+1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -45,14 +45,14 @@ export class WalmartSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.WALMART_SEARCH, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/youtube-channel/youtube-channel-tool.ts b/src/tools/youtube-channel/youtube-channel-tool.ts index 2e2a7d4..37f70cb 100644 --- a/src/tools/youtube-channel/youtube-channel-tool.ts +++ b/src/tools/youtube-channel/youtube-channel-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodLimit = z .number() @@ -29,14 +30,14 @@ export class YoutubeChannelTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_CHANNEL, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: 
params, extra }); return { content: [ diff --git a/src/tools/youtube-metadata/youtube-metadata-tool.ts b/src/tools/youtube-metadata/youtube-metadata-tool.ts index 1a3279b..2ea2fb7 100644 --- a/src/tools/youtube-metadata/youtube-metadata-tool.ts +++ b/src/tools/youtube-metadata/youtube-metadata-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class YoutubeMetadataTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -23,13 +24,13 @@ export class YoutubeMetadataTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_METADATA, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/youtube-search/youtube-search-tool.ts b/src/tools/youtube-search/youtube-search-tool.ts index 8e8b262..75b89d3 100644 --- a/src/tools/youtube-search/youtube-search-tool.ts +++ b/src/tools/youtube-search/youtube-search-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class YoutubeSearchTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -23,13 +24,13 @@ export class YoutubeSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, 
target: SCRAPER_API_TARGETS.YOUTUBE_SEARCH, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/youtube-subtitles/youtube-subtitles-tool.ts b/src/tools/youtube-subtitles/youtube-subtitles-tool.ts index c1f5b6c..935b233 100644 --- a/src/tools/youtube-subtitles/youtube-subtitles-tool.ts +++ b/src/tools/youtube-subtitles/youtube-subtitles-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodLanguageCode = z .string() @@ -29,13 +30,13 @@ export class YoutubeSubtitlesTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_SUBTITLES, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/utils.ts b/src/utils.ts index 3148789..a5c280a 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,5 +1,8 @@ import { TOOLSET } from './constants'; +export { ProgressNotifier, withProgress } from './utils/progress'; +export type { ProgressExtra } from './utils/progress'; + export const resolveToolsets = (toolsets?: string): TOOLSET[] => { if (!toolsets?.trim()) { return []; diff --git a/src/utils/__tests__/progress.test.ts b/src/utils/__tests__/progress.test.ts new file mode 100644 index 0000000..9125fa0 --- /dev/null +++ b/src/utils/__tests__/progress.test.ts @@ -0,0 +1,189 @@ +import { ProgressNotifier, ProgressExtra } from 
'../progress'; + +describe('ProgressNotifier', () => { + describe('when progressToken is provided', () => { + it('sends progress notification', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0.5, + total: 1, + message: 'Processing...', + }, + }); + }); + + it('sends notification with default progress values', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Starting...'); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0, + total: 1, + message: 'Starting...', + }, + }); + }); + + it('schedules delayed notification', async () => { + jest.useFakeTimers(); + + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + + expect(timeout).not.toBeNull(); + expect(mockSendNotification).not.toHaveBeenCalled(); + + 
jest.advanceTimersByTime(3000); + await Promise.resolve(); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0, + total: 1, + message: 'Waiting...', + }, + }); + + if (timeout) clearTimeout(timeout); + jest.useRealTimers(); + }); + + it('silently ignores errors from sendNotification', async () => { + const mockSendNotification = jest.fn().mockRejectedValue(new Error('Network error')); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + + await expect(notifier.notify('Processing...')).resolves.toBeUndefined(); + }); + }); + + describe('when progressToken is NOT provided', () => { + it('does not send notification when progressToken is missing', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: {}, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).not.toHaveBeenCalled(); + }); + + it('does not send notification when _meta is missing', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).not.toHaveBeenCalled(); + }); + + it('does not schedule delayed notification', async () => { + const extra: ProgressExtra = { + signal: new 
AbortController().signal, + requestId: 'req-123', + sendNotification: jest.fn(), + sendRequest: jest.fn(), + }; + + const notifier = new ProgressNotifier(extra); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + + expect(timeout).toBeNull(); + }); + }); + + describe('when extra is undefined', () => { + it('does not throw when extra is undefined', async () => { + const notifier = new ProgressNotifier(undefined); + await expect(notifier.notify('Processing...')).resolves.toBeUndefined(); + }); + + it('returns null for delayed notification', async () => { + const notifier = new ProgressNotifier(undefined); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + expect(timeout).toBeNull(); + }); + }); + + describe('getElapsedMs', () => { + it('returns elapsed time since construction', async () => { + jest.useFakeTimers(); + + const notifier = new ProgressNotifier(undefined); + + expect(notifier.getElapsedMs()).toBe(0); + + jest.advanceTimersByTime(1500); + expect(notifier.getElapsedMs()).toBe(1500); + + jest.advanceTimersByTime(500); + expect(notifier.getElapsedMs()).toBe(2000); + + jest.useRealTimers(); + }); + }); +}); diff --git a/src/utils/progress.ts b/src/utils/progress.ts new file mode 100644 index 0000000..e9732e4 --- /dev/null +++ b/src/utils/progress.ts @@ -0,0 +1,325 @@ +import { ServerNotification, ServerRequest } from '@modelcontextprotocol/sdk/types.js'; +import { RequestHandlerExtra } from '@modelcontextprotocol/sdk/shared/protocol.js'; + +export type ProgressNotificationParams = { + progressToken: string | number; + progress: number; + total?: number; + message?: string; +}; + +export type ProgressExtra = RequestHandlerExtra<ServerRequest, ServerNotification>; + +const WAITING_WORDS = [ + 'Accomplishing', + 'Actioning', + 'Actualizing', + 'Architecting', + 'Baking', + 'Beaming', + "Beboppin'", + 'Befuddling', + 'Billowing', + 'Blanching', + 'Bloviating', + 'Boogieing', + 'Boondoggling', + 'Booping', + 'Bootstrapping', + 'Brewing', + 'Bunning', + 
'Burrowing', + 'Calculating', + 'Canoodling', + 'Caramelizing', + 'Cascading', + 'Catapulting', + 'Cerebrating', + 'Channeling', + 'Channelling', + 'Choreographing', + 'Churning', + 'Clauding', + 'Coalescing', + 'Cogitating', + 'Combobulating', + 'Composing', + 'Computing', + 'Concocting', + 'Considering', + 'Contemplating', + 'Cooking', + 'Crafting', + 'Creating', + 'Crunching', + 'Crystallizing', + 'Cultivating', + 'Deciphering', + 'Deliberating', + 'Determining', + 'Dilly-dallying', + 'Discombobulating', + 'Doing', + 'Doodling', + 'Drizzling', + 'Ebbing', + 'Effecting', + 'Elucidating', + 'Embellishing', + 'Enchanting', + 'Envisioning', + 'Evaporating', + 'Fermenting', + 'Fiddle-faddling', + 'Finagling', + 'Flambéing', + 'Flibbertigibbeting', + 'Flowing', + 'Flummoxing', + 'Fluttering', + 'Forging', + 'Forming', + 'Frolicking', + 'Frosting', + 'Gallivanting', + 'Galloping', + 'Garnishing', + 'Generating', + 'Gesticulating', + 'Germinating', + 'Gitifying', + 'Grooving', + 'Gusting', + 'Harmonizing', + 'Hashing', + 'Hatching', + 'Herding', + 'Honking', + 'Hullaballooing', + 'Hyperspacing', + 'Ideating', + 'Imagining', + 'Improvising', + 'Incubating', + 'Inferring', + 'Infusing', + 'Ionizing', + 'Jitterbugging', + 'Julienning', + 'Kneading', + 'Leavening', + 'Levitating', + 'Lollygagging', + 'Manifesting', + 'Marinating', + 'Meandering', + 'Metamorphosing', + 'Misting', + 'Moonwalking', + 'Moseying', + 'Mulling', + 'Mustering', + 'Musing', + 'Nebulizing', + 'Nesting', + 'Newspapering', + 'Noodling', + 'Nucleating', + 'Orbiting', + 'Orchestrating', + 'Osmosing', + 'Perambulating', + 'Percolating', + 'Perusing', + 'Philosophising', + 'Photosynthesizing', + 'Pollinating', + 'Pondering', + 'Pontificating', + 'Pouncing', + 'Precipitating', + 'Prestidigitating', + 'Processing', + 'Proofing', + 'Propagating', + 'Puttering', + 'Puzzling', + 'Quantumizing', + 'Razzle-dazzling', + 'Razzmatazzing', + 'Recombobulating', + 'Reticulating', + 'Roosting', + 'Ruminating', + 
'Sautéing', + 'Scampering', + 'Schlepping', + 'Scurrying', + 'Seasoning', + 'Shenaniganing', + 'Shimmying', + 'Simmering', + 'Skedaddling', + 'Sketching', + 'Slithering', + 'Smooshing', + 'Sock-hopping', + 'Spelunking', + 'Spinning', + 'Sprouting', + 'Stewing', + 'Sublimating', + 'Swirling', + 'Swooping', + 'Symbioting', + 'Synthesizing', + 'Tempering', + 'Thinking', + 'Thundering', + 'Tinkering', + 'Tomfoolering', + 'Topsy-turvying', + 'Transfiguring', + 'Transmuting', + 'Twisting', + 'Undulating', + 'Unfurling', + 'Unravelling', + 'Vibing', + 'Waddling', + 'Wandering', + 'Warping', + 'Whatchamacalliting', + 'Whirlpooling', + 'Whirring', + 'Whisking', + 'Wibbling', + 'Working', + 'Wrangling', + 'Zesting', + 'Zigzagging', +]; + +const getRandomWaitingWord = (): string => { + const index = Math.floor(Math.random() * WAITING_WORDS.length); + return `${WAITING_WORDS[index]}...`; +}; + +export class ProgressNotifier { + private extra?: ProgressExtra; + + private startTime: number; + + private progressToken?: string | number; + + private waitingIntervalId?: NodeJS.Timeout; + + constructor(extra?: ProgressExtra) { + this.extra = extra; + this.startTime = Date.now(); + this.progressToken = extra?._meta?.progressToken as string | number | undefined; + } + + private canSendProgress(): boolean { + return this.progressToken !== undefined && this.extra?.sendNotification !== undefined; + } + + async notify(message: string, progress?: number, total?: number): Promise<void> { + if (!this.canSendProgress()) { + return; + } + + try { + await this.extra!.sendNotification({ + method: 'notifications/progress', + params: { + progressToken: this.progressToken!, + progress: progress ?? 0, + total: total ??
1, + message, + }, + } as ServerNotification); + } catch { + // Silently ignore if client doesn't support progress notifications + } + } + + async notifyAfterDelay(message: string, delayMs: number = 3000): Promise<NodeJS.Timeout | null> { + if (!this.canSendProgress()) { + return null; + } + + return setTimeout(async () => { + await this.notify(message); + }, delayMs); + } + + startWaitingNotifications(initialDelayMs: number = 3000, intervalMs: number = 5000): void { + if (!this.canSendProgress()) { + return; + } + + setTimeout(() => { + this.notify(getRandomWaitingWord()); + + this.waitingIntervalId = setInterval(() => { + this.notify(getRandomWaitingWord()); + }, intervalMs); + }, initialDelayMs); + } + + stopWaitingNotifications(): void { + if (this.waitingIntervalId) { + clearInterval(this.waitingIntervalId); + this.waitingIntervalId = undefined; + } + } + + getElapsedMs(): number { + return Date.now() - this.startTime; + } +} + +// eslint-disable-next-line no-restricted-syntax +export async function withProgress<T>( + extra: ProgressExtra, + stages: { + onStart?: string; + onWaiting?: string; + onProcessing?: string; + waitingDelayMs?: number; + }, + operation: (notifier: ProgressNotifier) => Promise<T> +): Promise<T> { + const notifier = new ProgressNotifier(extra); + let waitingTimeout: NodeJS.Timeout | null = null; + + try { + if (stages.onStart) { + await notifier.notify(stages.onStart, 0, 1); + } + + if (stages.onWaiting) { + waitingTimeout = await notifier.notifyAfterDelay( + stages.onWaiting, + stages.waitingDelayMs ??
3000 + ); + } + + const result = await operation(notifier); + + if (waitingTimeout) { + clearTimeout(waitingTimeout); + } + + if (stages.onProcessing) { + await notifier.notify(stages.onProcessing, 0.9, 1); + } + + return result; + } finally { + if (waitingTimeout) { + clearTimeout(waitingTimeout); + } + } +} diff --git a/src/version.ts b/src/version.ts new file mode 100644 index 0000000..4b62084 --- /dev/null +++ b/src/version.ts @@ -0,0 +1,7 @@ +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; + +/** Resolved at runtime so `rootDir` can stay `./src` (no `import` of repo-root `package.json`). */ +export const PACKAGE_VERSION = ( + JSON.parse(readFileSync(join(__dirname, '..', 'package.json'), 'utf8')) as { version: string } +).version; diff --git a/tsconfig.json b/tsconfig.json index 07491a9..aaeeb1c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -2,6 +2,7 @@ "compilerOptions": { "target": "es2017", "module": "nodenext", + "rootDir": "./src", "outDir": "./build", "baseUrl": "src", "strict": true,