From f44a209d4a978d36ac6dd34caf12237961f0353a Mon Sep 17 00:00:00 2001 From: Domantas Jurkus Date: Thu, 23 Apr 2026 22:19:31 +0300 Subject: [PATCH 1/2] feat: version 1.2.1 --- package-lock.json | 4 ++-- package.json | 2 +- server.json | 4 ++-- src/server/sapi-base-server.ts | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 69f6190..d865a28 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "license": "ISC", "dependencies": { "@modelcontextprotocol/sdk": "^1.12.3", diff --git a/package.json b/package.json index 49d5800..026d6e5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "description": "Decodo MCP Server", "bin": { "decodo-mcp": "./build/index.js" diff --git a/server.json b/server.json index 9ce1246..932a3f2 100644 --- a/server.json +++ b/server.json @@ -7,13 +7,13 @@ "url": "https://github.com/Decodo/mcp-web-scraper", "source": "github" }, - "version": "1.2.0", + "version": "1.2.1", "packages": [ { "registryType": "npm", "registryBaseUrl": "https://registry.npmjs.org", "identifier": "@decodo/mcp-server", - "version": "1.2.0", + "version": "1.2.1", "transport": { "type": "stdio" } diff --git a/src/server/sapi-base-server.ts b/src/server/sapi-base-server.ts index fd112cd..3de1a06 100644 --- a/src/server/sapi-base-server.ts +++ b/src/server/sapi-base-server.ts @@ -47,7 +47,7 @@ export class ScraperAPIBaseServer { constructor({ auth, toolsets = [] }: { auth: string; toolsets: TOOLSET[] }) { this.server = new McpServer({ name: 'decodo', - version: '1.2.0', + version: '1.2.1', }); this.sapiClient = new ScraperApiClient(); From 5f4adc943b5fa005c82a5fb4c6f980a6f3d2e00c Mon Sep 17 00:00:00 
2001 From: Domantas Jurkus Date: Fri, 24 Apr 2026 14:12:30 +0300 Subject: [PATCH 2/2] Add progress tracker --- package.json | 3 +- src/clients/scraper-api-client.ts | 126 ++++--- .../amazon-bestsellers-tool.ts | 5 +- .../amazon-pricing/amazon-pricing-tool.ts | 6 +- .../amazon-product/amazon-product-tool.ts | 6 +- src/tools/amazon-search/amazon-search-tool.ts | 6 +- .../amazon-sellers/amazon-sellers-tool.ts | 5 +- src/tools/bing-search/bing-search-tool.ts | 6 +- src/tools/chatgpt/chatgpt-tool.ts | 5 +- src/tools/google-ads/google-ads-tool.ts | 6 +- .../google-ai-mode/google-ai-mode-tool.ts | 5 +- src/tools/google-lens/google-lens-tool.ts | 6 +- src/tools/google-search/google-search-tool.ts | 6 +- .../google-travel-hotels-tool.ts | 5 +- src/tools/perplexity/perplexity-tool.ts | 5 +- src/tools/reddit-post/reddit-post-tool.ts | 6 +- .../reddit-subreddit/reddit-subreddit-tool.ts | 6 +- src/tools/reddit-user/reddit-user-tool.ts | 6 +- .../scrape-as-markdown-tool.ts | 5 +- src/tools/screenshot/screenshot-tool.ts | 4 +- .../target-product/target-product-tool.ts | 5 +- src/tools/target-search/target-search-tool.ts | 6 +- src/tools/tiktok-post/tiktok-post-tool.ts | 5 +- .../tiktok-shop-product-tool.ts | 5 +- .../tiktok-shop-search-tool.ts | 6 +- .../tiktok-shop-url/tiktok-shop-url-tool.ts | 5 +- .../walmart-product/walmart-product-tool.ts | 6 +- .../walmart-search/walmart-search-tool.ts | 6 +- .../youtube-channel/youtube-channel-tool.ts | 5 +- .../youtube-metadata/youtube-metadata-tool.ts | 5 +- .../youtube-search/youtube-search-tool.ts | 5 +- .../youtube-subtitles-tool.ts | 5 +- src/utils.ts | 3 + src/utils/__tests__/progress.test.ts | 189 ++++++++++ src/utils/progress.ts | 325 ++++++++++++++++++ 35 files changed, 682 insertions(+), 127 deletions(-) create mode 100644 src/utils/__tests__/progress.test.ts create mode 100644 src/utils/progress.ts diff --git a/package.json b/package.json index 026d6e5..ddeb2a0 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,8 @@ 
"mcpName": "io.github.Decodo/mcp-web-scraper", "scripts": { "build": "tsc && chmod 755 build/index.js build/server.js", - "dev": "nodemon --watch src -e ts --exec 'npm run build'", + "dev": "nodemon --watch src -e ts --exec 'tsx src/server.ts'", + "dev:server": "nodemon --watch src -e ts --exec 'npm run build'", "start": "node build/server.js", "inspect": "mcp-inspector", "test": "jest", diff --git a/src/clients/scraper-api-client.ts b/src/clients/scraper-api-client.ts index 13733a4..796c4c7 100644 --- a/src/clients/scraper-api-client.ts +++ b/src/clients/scraper-api-client.ts @@ -1,10 +1,13 @@ import axios, { AxiosError, AxiosResponse } from 'axios'; import { ScraperApiResponseData } from './types'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; +import { ProgressNotifier, ProgressExtra } from '../utils'; const MAX_RETRIES = Math.max(0, parseInt(process.env.MAX_RETRIES ?? '2', 10) || 2); const RETRYABLE_STATUS_CODES = new Set([429, 502, 503, 504]); const RETRYABLE_NETWORK_CODES = new Set(['ECONNRESET', 'ETIMEDOUT', 'ECONNABORTED', 'ENOTFOUND']); +const WAITING_INITIAL_DELAY_MS = 3000; +const WAITING_INTERVAL_MS = 5000; const isRetryable = (error: AxiosError): boolean => { if (error.response) { @@ -60,68 +63,85 @@ export class ScraperApiClient { scrape = async ({ auth, scrapingParams, + extra, }: { auth: string; scrapingParams: ScrapingMCPParams; + extra?: ProgressExtra; }) => { - const transformedParams = this.transformScrapingParams({ scrapingParams }); - - const url = process.env.DECODO_SAPI_HOST || 'https://scraper-api.decodo.com'; - - let lastError: unknown; - - for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { - try { - const res = await axios.request>({ - url: `${url}/v2/scrape`, - method: 'POST', - headers: { - authorization: `Basic ${auth}`, - 'x-integration': 'mcp', - }, - timeout: 180000, - data: { - ...transformedParams, - }, - }); - - return this.transformResponse({ res }); - } catch (error) { - lastError = error; - - if (attempt 
< MAX_RETRIES && axios.isAxiosError(error) && isRetryable(error)) { - const delayMs = getRetryDelay(attempt, error); - const reason = error.response - ? `status ${error.response.status}` - : `network error ${error.code}`; - - console.error( - `[scraper-api-client] Retry ${ - attempt + 1 - }/${MAX_RETRIES} after ${reason}, waiting ${Math.round(delayMs)}ms` - ); - - await sleep(delayMs); - continue; - } + const notifier = new ProgressNotifier(extra); - break; - } - } + try { + await notifier.notify('Submitting request to Decodo API...', 0, 1); - if (axios.isAxiosError(lastError)) { - const status = lastError.response?.status; - let errorMessage = lastError.response?.data?.message ?? lastError.message; + notifier.startWaitingNotifications(WAITING_INITIAL_DELAY_MS, WAITING_INTERVAL_MS); - if (status === 401) { - errorMessage = 'Authentication failed.'; - } - if (status === 429) { - errorMessage = JSON.stringify(lastError.response?.data); + const transformedParams = this.transformScrapingParams({ scrapingParams }); + const url = process.env.DECODO_SAPI_HOST || 'https://scraper-api.decodo.com'; + + let lastError: unknown; + + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + try { + const res = await axios.request>({ + url: `${url}/v2/scrape`, + method: 'POST', + headers: { + authorization: `Basic ${auth}`, + 'x-integration': 'mcp', + }, + timeout: 180000, + data: { + ...transformedParams, + }, + }); + + notifier.stopWaitingNotifications(); + + await notifier.notify('Processing response...', 0.9, 1); + + return this.transformResponse({ res }); + } catch (error) { + lastError = error; + + if (attempt < MAX_RETRIES && axios.isAxiosError(error) && isRetryable(error)) { + const delayMs = getRetryDelay(attempt, error); + const reason = error.response + ? 
`status ${error.response.status}` + : `network error ${error.code}`; + + console.error( + `[scraper-api-client] Retry ${ + attempt + 1 + }/${MAX_RETRIES} after ${reason}, waiting ${Math.round(delayMs)}ms` + ); + + await notifier.notify(`Retrying (${attempt + 1}/${MAX_RETRIES})...`, 0.1, 1); + + await sleep(delayMs); + continue; + } + + break; + } } - throw new Error(`Scraper API request failed (${status}): ${errorMessage}`); + if (axios.isAxiosError(lastError)) { + const status = lastError.response?.status; + let errorMessage = lastError.response?.data?.message ?? lastError.message; + + if (status === 401) { + errorMessage = 'Authentication failed.'; + } + if (status === 429) { + errorMessage = JSON.stringify(lastError.response?.data); + } + + throw new Error(`Scraper API request failed (${status}): ${errorMessage}`); + } + throw lastError; + } finally { + notifier.stopWaitingNotifications(); } - throw lastError; }; } diff --git a/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts b/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts index 8e481d7..0d5d00c 100644 --- a/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts +++ b/src/tools/amazon-bestsellers/amazon-bestsellers-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDomain = z .string() @@ -37,14 +38,14 @@ export class AmazonBestsellersTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_BESTSELLERS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await 
sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/amazon-pricing/amazon-pricing-tool.ts b/src/tools/amazon-pricing/amazon-pricing-tool.ts index fd0a24e..9b8905b 100644 --- a/src/tools/amazon-pricing/amazon-pricing-tool.ts +++ b/src/tools/amazon-pricing/amazon-pricing-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -51,14 +51,14 @@ export class AmazonPricingTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_PRICING, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-product/amazon-product-tool.ts b/src/tools/amazon-product/amazon-product-tool.ts index 401dcdc..eb714c2 100644 --- a/src/tools/amazon-product/amazon-product-tool.ts +++ b/src/tools/amazon-product/amazon-product-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -45,14 +45,14 @@ export class 
AmazonProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_PRODUCT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-search/amazon-search-tool.ts b/src/tools/amazon-search/amazon-search-tool.ts index 7399e1a..9030059 100644 --- a/src/tools/amazon-search/amazon-search-tool.ts +++ b/src/tools/amazon-search/amazon-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -46,14 +46,14 @@ export class AmazonSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/amazon-sellers/amazon-sellers-tool.ts b/src/tools/amazon-sellers/amazon-sellers-tool.ts index 3a7dbac..bec53b5 100644 --- a/src/tools/amazon-sellers/amazon-sellers-tool.ts +++ b/src/tools/amazon-sellers/amazon-sellers-tool.ts @@ -3,6 
+3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDomain = z .string() @@ -38,14 +39,14 @@ export class AmazonSellersTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.AMAZON_SELLERS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/bing-search/bing-search-tool.ts b/src/tools/bing-search/bing-search-tool.ts index dd03ac8..fe5a6f6 100644 --- a/src/tools/bing-search/bing-search-tool.ts +++ b/src/tools/bing-search/bing-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -47,14 +47,14 @@ export class BingSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.BING_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); 
const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/chatgpt/chatgpt-tool.ts b/src/tools/chatgpt/chatgpt-tool.ts index b7416bb..45931cf 100644 --- a/src/tools/chatgpt/chatgpt-tool.ts +++ b/src/tools/chatgpt/chatgpt-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodGeo } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ChatGPTTool extends Tool { toolset = TOOLSET.AI; @@ -26,14 +27,14 @@ export class ChatGPTTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.CHATGPT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-ads/google-ads-tool.ts b/src/tools/google-ads/google-ads-tool.ts index 6cdb9ca..b7f6617 100644 --- a/src/tools/google-ads/google-ads-tool.ts +++ b/src/tools/google-ads/google-ads-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -41,14 +41,14 @@ export class GoogleAdsTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { 
const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_ADS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-ai-mode/google-ai-mode-tool.ts b/src/tools/google-ai-mode/google-ai-mode-tool.ts index b773f12..a9efe73 100644 --- a/src/tools/google-ai-mode/google-ai-mode-tool.ts +++ b/src/tools/google-ai-mode/google-ai-mode-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodGeo = z .string() @@ -31,14 +32,14 @@ export class GoogleAiModeTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_AI_MODE, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/google-lens/google-lens-tool.ts b/src/tools/google-lens/google-lens-tool.ts index 7e39add..68c0cc7 100644 --- a/src/tools/google-lens/google-lens-tool.ts +++ b/src/tools/google-lens/google-lens-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from 
'../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -33,14 +33,14 @@ export class GoogleLensTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_LENS, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-search/google-search-tool.ts b/src/tools/google-search/google-search-tool.ts index 13dcfee..0095d07 100644 --- a/src/tools/google-search/google-search-tool.ts +++ b/src/tools/google-search/google-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodLocale, zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -41,14 +41,14 @@ export class GoogleSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_SEARCH, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/google-travel-hotels/google-travel-hotels-tool.ts b/src/tools/google-travel-hotels/google-travel-hotels-tool.ts index 99d3620..341f31e 
100644 --- a/src/tools/google-travel-hotels/google-travel-hotels-tool.ts +++ b/src/tools/google-travel-hotels/google-travel-hotels-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType, zodLocale } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodPageFrom = z .number() @@ -33,14 +34,14 @@ export class GoogleTravelHotelsTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.GOOGLE_TRAVEL_HOTELS, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/perplexity/perplexity-tool.ts b/src/tools/perplexity/perplexity-tool.ts index 0b9cdbb..a3dbdf3 100644 --- a/src/tools/perplexity/perplexity-tool.ts +++ b/src/tools/perplexity/perplexity-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodGeo } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class PerplexityTool extends Tool { toolset = TOOLSET.AI; @@ -26,14 +27,14 @@ export class PerplexityTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.PERPLEXITY, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + 
const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-post/reddit-post-tool.ts b/src/tools/reddit-post/reddit-post-tool.ts index 3a8d06a..d81f3c9 100644 --- a/src/tools/reddit-post/reddit-post-tool.ts +++ b/src/tools/reddit-post/reddit-post-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditPostTool extends Tool { @@ -34,13 +34,13 @@ export class RedditPostTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_POST, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-subreddit/reddit-subreddit-tool.ts b/src/tools/reddit-subreddit/reddit-subreddit-tool.ts index e2756fb..51a7d6a 100644 --- a/src/tools/reddit-subreddit/reddit-subreddit-tool.ts +++ b/src/tools/reddit-subreddit/reddit-subreddit-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditSubredditTool extends Tool { @@ -30,13 +30,13 @@ export class 
RedditSubredditTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_SUBREDDIT, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/reddit-user/reddit-user-tool.ts b/src/tools/reddit-user/reddit-user-tool.ts index 2051daf..0a370d1 100644 --- a/src/tools/reddit-user/reddit-user-tool.ts +++ b/src/tools/reddit-user/reddit-user-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { Tool, ToolRegistrationArgs } from '../tool'; export class RedditUserTool extends Tool { @@ -36,13 +36,13 @@ export class RedditUserTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.REDDIT_USER, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts b/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts index 3c43027..25c8e25 100644 --- a/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts +++ b/src/tools/scrape-as-markdown/scrape-as-markdown-tool.ts @@ -4,6 +4,7 @@ import { NodeHtmlMarkdown } from 
'node-html-markdown'; import { zodGeo, zodJsRender, zodLocale, zodTokenLimit } from '../../zod/zod-types'; import { TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ScrapeAsMarkdownTool extends Tool { toolset = TOOLSET.WEB; @@ -56,8 +57,8 @@ export class ScrapeAsMarkdownTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { - const { data } = await sapiClient.scrape({ auth, scrapingParams }); + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { + const { data } = await sapiClient.scrape({ auth, scrapingParams, extra }); const { data: markdown, isTruncated } = this.transformResponse({ data, diff --git a/src/tools/screenshot/screenshot-tool.ts b/src/tools/screenshot/screenshot-tool.ts index 52a849a..2f1846c 100644 --- a/src/tools/screenshot/screenshot-tool.ts +++ b/src/tools/screenshot/screenshot-tool.ts @@ -3,6 +3,7 @@ import { ScrapingMCPParams } from 'types'; import { zodGeo } from '../../zod/zod-types'; import { TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class ScreenshotTool extends Tool { toolset = TOOLSET.WEB; @@ -25,10 +26,11 @@ export class ScreenshotTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const { data } = await sapiClient.scrape({ auth, scrapingParams: { ...scrapingParams, headless: 'png' }, + extra, }); return { diff --git a/src/tools/target-product/target-product-tool.ts b/src/tools/target-product/target-product-tool.ts index f150c11..620a573 100644 --- a/src/tools/target-product/target-product-tool.ts +++ b/src/tools/target-product/target-product-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from 
'../../constants'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodDeliveryZip = z.string().describe('ZIP code for delivery location').optional(); @@ -32,7 +33,7 @@ export class TargetProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { headless: 'html', ...scrapingParams, @@ -40,7 +41,7 @@ export class TargetProductTool extends Tool { parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/target-search/target-search-tool.ts b/src/tools/target-search/target-search-tool.ts index b1ee098..a0e3226 100644 --- a/src/tools/target-search/target-search-tool.ts +++ b/src/tools/target-search/target-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -39,7 +39,7 @@ export class TargetSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { headless: 'html', ...scrapingParams, @@ -47,7 +47,7 @@ export class TargetSearchTool extends Tool { markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, 
scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-post/tiktok-post-tool.ts b/src/tools/tiktok-post/tiktok-post-tool.ts index 26f78d8..adda1d5 100644 --- a/src/tools/tiktok-post/tiktok-post-tool.ts +++ b/src/tools/tiktok-post/tiktok-post-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodXhr } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokPostTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -28,13 +29,13 @@ export class TiktokPostTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_POST, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts b/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts index 6d7caf6..387d378 100644 --- a/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts +++ b/src/tools/tiktok-shop-product/tiktok-shop-product-tool.ts @@ -3,6 +3,7 @@ import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender, zodDeviceType, zodCountry } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokShopProductTool extends Tool { toolset = TOOLSET.ECOMMERCE; @@ -27,14 +28,14 @@ export class TiktokShopProductTool extends Tool { openWorldHint: true, }, }, 
- async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_PRODUCT, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts b/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts index 5184557..8cf0db5 100644 --- a/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts +++ b/src/tools/tiktok-shop-search/tiktok-shop-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodGeo, zodJsRender, zodCountry, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -35,14 +35,14 @@ export class TiktokShopSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_SEARCH, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts b/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts index c099ac3..4deb13c 100644 --- a/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts +++ b/src/tools/tiktok-shop-url/tiktok-shop-url-tool.ts @@ -3,6 +3,7 @@ import { 
ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class TiktokShopUrlTool extends Tool { toolset = TOOLSET.ECOMMERCE; @@ -25,13 +26,13 @@ export class TiktokShopUrlTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.TIKTOK_SHOP_URL, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/walmart-product/walmart-product-tool.ts b/src/tools/walmart-product/walmart-product-tool.ts index 9d5e952..5428094 100644 --- a/src/tools/walmart-product/walmart-product-tool.ts +++ b/src/tools/walmart-product/walmart-product-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -44,14 +44,14 @@ export class WalmartProductTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.WALMART_PRODUCT, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, 
extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/walmart-search/walmart-search-tool.ts b/src/tools/walmart-search/walmart-search-tool.ts index facd693..8041a15 100644 --- a/src/tools/walmart-search/walmart-search-tool.ts +++ b/src/tools/walmart-search/walmart-search-tool.ts @@ -1,7 +1,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; -import { removeKeyFromNestedObject } from '../../utils'; +import { removeKeyFromNestedObject, ProgressExtra } from '../../utils'; import { zodJsRender, zodDeviceType } from '../../zod/zod-types'; import { Tool, ToolRegistrationArgs } from '../tool'; @@ -45,14 +45,14 @@ export class WalmartSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.WALMART_SEARCH, markdown: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); const { data: text } = this.transformResponse({ data }); diff --git a/src/tools/youtube-channel/youtube-channel-tool.ts b/src/tools/youtube-channel/youtube-channel-tool.ts index 2e2a7d4..37f70cb 100644 --- a/src/tools/youtube-channel/youtube-channel-tool.ts +++ b/src/tools/youtube-channel/youtube-channel-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodLimit = z .number() @@ -29,14 +30,14 @@ export class YoutubeChannelTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async 
(scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_CHANNEL, parse: true, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/youtube-metadata/youtube-metadata-tool.ts b/src/tools/youtube-metadata/youtube-metadata-tool.ts index 1a3279b..2ea2fb7 100644 --- a/src/tools/youtube-metadata/youtube-metadata-tool.ts +++ b/src/tools/youtube-metadata/youtube-metadata-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; export class YoutubeMetadataTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -23,13 +24,13 @@ export class YoutubeMetadataTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_METADATA, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/youtube-search/youtube-search-tool.ts b/src/tools/youtube-search/youtube-search-tool.ts index 8e8b262..75b89d3 100644 --- a/src/tools/youtube-search/youtube-search-tool.ts +++ b/src/tools/youtube-search/youtube-search-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; 
export class YoutubeSearchTool extends Tool { toolset = TOOLSET.SOCIAL_MEDIA; @@ -23,13 +24,13 @@ export class YoutubeSearchTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_SEARCH, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/tools/youtube-subtitles/youtube-subtitles-tool.ts b/src/tools/youtube-subtitles/youtube-subtitles-tool.ts index c1f5b6c..935b233 100644 --- a/src/tools/youtube-subtitles/youtube-subtitles-tool.ts +++ b/src/tools/youtube-subtitles/youtube-subtitles-tool.ts @@ -2,6 +2,7 @@ import z from 'zod'; import { ScraperAPIParams, ScrapingMCPParams } from 'types'; import { SCRAPER_API_TARGETS, TOOLSET } from '../../constants'; import { Tool, ToolRegistrationArgs } from '../tool'; +import { ProgressExtra } from '../../utils'; const zodLanguageCode = z .string() @@ -29,13 +30,13 @@ export class YoutubeSubtitlesTool extends Tool { openWorldHint: true, }, }, - async (scrapingParams: ScrapingMCPParams) => { + async (scrapingParams: ScrapingMCPParams, extra: ProgressExtra) => { const params = { ...scrapingParams, target: SCRAPER_API_TARGETS.YOUTUBE_SUBTITLES, } satisfies ScraperAPIParams; - const { data } = await sapiClient.scrape({ auth, scrapingParams: params }); + const { data } = await sapiClient.scrape({ auth, scrapingParams: params, extra }); return { content: [ diff --git a/src/utils.ts b/src/utils.ts index 3148789..a5c280a 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,5 +1,8 @@ import { TOOLSET } from './constants'; +export { ProgressNotifier, withProgress } from './utils/progress'; +export type { ProgressExtra } from './utils/progress'; + export const resolveToolsets = (toolsets?: 
string): TOOLSET[] => { if (!toolsets?.trim()) { return []; diff --git a/src/utils/__tests__/progress.test.ts b/src/utils/__tests__/progress.test.ts new file mode 100644 index 0000000..9125fa0 --- /dev/null +++ b/src/utils/__tests__/progress.test.ts @@ -0,0 +1,189 @@ +import { ProgressNotifier, ProgressExtra } from '../progress'; + +describe('ProgressNotifier', () => { + describe('when progressToken is provided', () => { + it('sends progress notification', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0.5, + total: 1, + message: 'Processing...', + }, + }); + }); + + it('sends notification with default progress values', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Starting...'); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0, + total: 1, + message: 'Starting...', + }, + }); + }); + + it('schedules delayed notification', async () => { + jest.useFakeTimers(); + + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: 
mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + + expect(timeout).not.toBeNull(); + expect(mockSendNotification).not.toHaveBeenCalled(); + + jest.advanceTimersByTime(3000); + await Promise.resolve(); + + expect(mockSendNotification).toHaveBeenCalledWith({ + method: 'notifications/progress', + params: { + progressToken: 'token-456', + progress: 0, + total: 1, + message: 'Waiting...', + }, + }); + + if (timeout) clearTimeout(timeout); + jest.useRealTimers(); + }); + + it('silently ignores errors from sendNotification', async () => { + const mockSendNotification = jest.fn().mockRejectedValue(new Error('Network error')); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: { + progressToken: 'token-456', + }, + }; + + const notifier = new ProgressNotifier(extra); + + await expect(notifier.notify('Processing...')).resolves.toBeUndefined(); + }); + }); + + describe('when progressToken is NOT provided', () => { + it('does not send notification when progressToken is missing', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + sendRequest: jest.fn(), + _meta: {}, + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).not.toHaveBeenCalled(); + }); + + it('does not send notification when _meta is missing', async () => { + const mockSendNotification = jest.fn().mockResolvedValue(undefined); + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: mockSendNotification, + 
sendRequest: jest.fn(), + }; + + const notifier = new ProgressNotifier(extra); + await notifier.notify('Processing...', 0.5, 1); + + expect(mockSendNotification).not.toHaveBeenCalled(); + }); + + it('does not schedule delayed notification', async () => { + const extra: ProgressExtra = { + signal: new AbortController().signal, + requestId: 'req-123', + sendNotification: jest.fn(), + sendRequest: jest.fn(), + }; + + const notifier = new ProgressNotifier(extra); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + + expect(timeout).toBeNull(); + }); + }); + + describe('when extra is undefined', () => { + it('does not throw when extra is undefined', async () => { + const notifier = new ProgressNotifier(undefined); + await expect(notifier.notify('Processing...')).resolves.toBeUndefined(); + }); + + it('returns null for delayed notification', async () => { + const notifier = new ProgressNotifier(undefined); + const timeout = await notifier.notifyAfterDelay('Waiting...', 3000); + expect(timeout).toBeNull(); + }); + }); + + describe('getElapsedMs', () => { + it('returns elapsed time since construction', async () => { + jest.useFakeTimers(); + + const notifier = new ProgressNotifier(undefined); + + expect(notifier.getElapsedMs()).toBe(0); + + jest.advanceTimersByTime(1500); + expect(notifier.getElapsedMs()).toBe(1500); + + jest.advanceTimersByTime(500); + expect(notifier.getElapsedMs()).toBe(2000); + + jest.useRealTimers(); + }); + }); +}); diff --git a/src/utils/progress.ts b/src/utils/progress.ts new file mode 100644 index 0000000..e9732e4 --- /dev/null +++ b/src/utils/progress.ts @@ -0,0 +1,325 @@ +import { ServerNotification, ServerRequest } from '@modelcontextprotocol/sdk/types.js'; +import { RequestHandlerExtra } from '@modelcontextprotocol/sdk/shared/protocol.js'; + +export type ProgressNotificationParams = { + progressToken: string | number; + progress: number; + total?: number; + message?: string; +}; + +export type ProgressExtra = 
RequestHandlerExtra; + +const WAITING_WORDS = [ + 'Accomplishing', + 'Actioning', + 'Actualizing', + 'Architecting', + 'Baking', + 'Beaming', + "Beboppin'", + 'Befuddling', + 'Billowing', + 'Blanching', + 'Bloviating', + 'Boogieing', + 'Boondoggling', + 'Booping', + 'Bootstrapping', + 'Brewing', + 'Bunning', + 'Burrowing', + 'Calculating', + 'Canoodling', + 'Caramelizing', + 'Cascading', + 'Catapulting', + 'Cerebrating', + 'Channeling', + 'Channelling', + 'Choreographing', + 'Churning', + 'Clauding', + 'Coalescing', + 'Cogitating', + 'Combobulating', + 'Composing', + 'Computing', + 'Concocting', + 'Considering', + 'Contemplating', + 'Cooking', + 'Crafting', + 'Creating', + 'Crunching', + 'Crystallizing', + 'Cultivating', + 'Deciphering', + 'Deliberating', + 'Determining', + 'Dilly-dallying', + 'Discombobulating', + 'Doing', + 'Doodling', + 'Drizzling', + 'Ebbing', + 'Effecting', + 'Elucidating', + 'Embellishing', + 'Enchanting', + 'Envisioning', + 'Evaporating', + 'Fermenting', + 'Fiddle-faddling', + 'Finagling', + 'Flambéing', + 'Flibbertigibbeting', + 'Flowing', + 'Flummoxing', + 'Fluttering', + 'Forging', + 'Forming', + 'Frolicking', + 'Frosting', + 'Gallivanting', + 'Galloping', + 'Garnishing', + 'Generating', + 'Gesticulating', + 'Germinating', + 'Gitifying', + 'Grooving', + 'Gusting', + 'Harmonizing', + 'Hashing', + 'Hatching', + 'Herding', + 'Honking', + 'Hullaballooing', + 'Hyperspacing', + 'Ideating', + 'Imagining', + 'Improvising', + 'Incubating', + 'Inferring', + 'Infusing', + 'Ionizing', + 'Jitterbugging', + 'Julienning', + 'Kneading', + 'Leavening', + 'Levitating', + 'Lollygagging', + 'Manifesting', + 'Marinating', + 'Meandering', + 'Metamorphosing', + 'Misting', + 'Moonwalking', + 'Moseying', + 'Mulling', + 'Mustering', + 'Musing', + 'Nebulizing', + 'Nesting', + 'Newspapering', + 'Noodling', + 'Nucleating', + 'Orbiting', + 'Orchestrating', + 'Osmosing', + 'Perambulating', + 'Percolating', + 'Perusing', + 'Philosophising', + 'Photosynthesizing', + 
'Pollinating', + 'Pondering', + 'Pontificating', + 'Pouncing', + 'Precipitating', + 'Prestidigitating', + 'Processing', + 'Proofing', + 'Propagating', + 'Puttering', + 'Puzzling', + 'Quantumizing', + 'Razzle-dazzling', + 'Razzmatazzing', + 'Recombobulating', + 'Reticulating', + 'Roosting', + 'Ruminating', + 'Sautéing', + 'Scampering', + 'Schlepping', + 'Scurrying', + 'Seasoning', + 'Shenaniganing', + 'Shimmying', + 'Simmering', + 'Skedaddling', + 'Sketching', + 'Slithering', + 'Smooshing', + 'Sock-hopping', + 'Spelunking', + 'Spinning', + 'Sprouting', + 'Stewing', + 'Sublimating', + 'Swirling', + 'Swooping', + 'Symbioting', + 'Synthesizing', + 'Tempering', + 'Thinking', + 'Thundering', + 'Tinkering', + 'Tomfoolering', + 'Topsy-turvying', + 'Transfiguring', + 'Transmuting', + 'Twisting', + 'Undulating', + 'Unfurling', + 'Unravelling', + 'Vibing', + 'Waddling', + 'Wandering', + 'Warping', + 'Whatchamacalliting', + 'Whirlpooling', + 'Whirring', + 'Whisking', + 'Wibbling', + 'Working', + 'Wrangling', + 'Zesting', + 'Zigzagging', +]; + +const getRandomWaitingWord = (): string => { + const index = Math.floor(Math.random() * WAITING_WORDS.length); + return `${WAITING_WORDS[index]}...`; +}; + +export class ProgressNotifier { + private extra?: ProgressExtra; + + private startTime: number; + + private progressToken?: string | number; + + private waitingIntervalId?: NodeJS.Timeout; + + constructor(extra?: ProgressExtra) { + this.extra = extra; + this.startTime = Date.now(); + this.progressToken = extra?._meta?.progressToken as string | number | undefined; + } + + private canSendProgress(): boolean { + return this.progressToken !== undefined && this.extra?.sendNotification !== undefined; + } + + async notify(message: string, progress?: number, total?: number): Promise { + if (!this.canSendProgress()) { + return; + } + + try { + await this.extra!.sendNotification({ + method: 'notifications/progress', + params: { + progressToken: this.progressToken!, + progress: progress ?? 
0, + total: total ?? 1, + message, + }, + } as ServerNotification); + } catch { + // Silently ignore if client doesn't support progress notifications + } + } + + async notifyAfterDelay(message: string, delayMs: number = 3000): Promise { + if (!this.canSendProgress()) { + return null; + } + + return setTimeout(async () => { + await this.notify(message); + }, delayMs); + } + + startWaitingNotifications(initialDelayMs: number = 3000, intervalMs: number = 5000): void { + if (!this.canSendProgress()) { + return; + } + + setTimeout(() => { + this.notify(getRandomWaitingWord()); + + this.waitingIntervalId = setInterval(() => { + this.notify(getRandomWaitingWord()); + }, intervalMs); + }, initialDelayMs); + } + + stopWaitingNotifications(): void { + if (this.waitingIntervalId) { + clearInterval(this.waitingIntervalId); + this.waitingIntervalId = undefined; + } + } + + getElapsedMs(): number { + return Date.now() - this.startTime; + } +} + +// eslint-disable-next-line no-restricted-syntax +export async function withProgress( + extra: ProgressExtra, + stages: { + onStart?: string; + onWaiting?: string; + onProcessing?: string; + waitingDelayMs?: number; + }, + operation: (notifier: ProgressNotifier) => Promise +): Promise { + const notifier = new ProgressNotifier(extra); + let waitingTimeout: NodeJS.Timeout | null = null; + + try { + if (stages.onStart) { + await notifier.notify(stages.onStart, 0, 1); + } + + if (stages.onWaiting) { + waitingTimeout = await notifier.notifyAfterDelay( + stages.onWaiting, + stages.waitingDelayMs ?? 3000 + ); + } + + const result = await operation(notifier); + + if (waitingTimeout) { + clearTimeout(waitingTimeout); + } + + if (stages.onProcessing) { + await notifier.notify(stages.onProcessing, 0.9, 1); + } + + return result; + } finally { + if (waitingTimeout) { + clearTimeout(waitingTimeout); + } + } +}