diff --git a/packages/auto/scrape_table_data.genai.mts b/packages/auto/scrape_table_data.genai.mts
index 1489edfe17..b74a19403b 100644
--- a/packages/auto/scrape_table_data.genai.mts
+++ b/packages/auto/scrape_table_data.genai.mts
@@ -3,6 +3,6 @@ const page = await host.browse('https://example.com');
 // Select a table element on the page by its test-id
 const table = page.locator('table[data-testid="data-table"]');
 // Convert the HTML of the table into Markdown format
-const markdownTable = parsers.HTMLToMarkdown(await table.innerHTML());
+const markdownTable = await parsers.HTMLToMarkdown(await table.innerHTML());
 // Define the converted table for further processing
 def('TABLE_MARKDOWN', markdownTable);
diff --git a/packages/sample/genaisrc/blog/unlock-the-power-of-automatic-web-page-content-analysis-with-genaiscript-.genai.mjs b/packages/sample/genaisrc/blog/unlock-the-power-of-automatic-web-page-content-analysis-with-genaiscript-.genai.mjs
index 33653811a1..8eb5372488 100644
--- a/packages/sample/genaisrc/blog/unlock-the-power-of-automatic-web-page-content-analysis-with-genaiscript-.genai.mjs
+++ b/packages/sample/genaisrc/blog/unlock-the-power-of-automatic-web-page-content-analysis-with-genaiscript-.genai.mjs
@@ -1,6 +1,6 @@
-const page = await host.browse('https://bing.com');
-const screenshot = await page.screenshot();
-defImages(screenshot);
-const text = parsers.HTMLToMarkdown(await page.content())
-def('PAGE_TEXT', text);
-$`Analyze the content of the page PAGE_TEXT and provide insights.`;
\ No newline at end of file
+const page = await host.browse("https://bing.com")
+const screenshot = await page.screenshot()
+defImages(screenshot)
+const text = await parsers.HTMLToMarkdown(await page.content())
+def("PAGE_TEXT", text)
+$`Analyze the content of the page PAGE_TEXT and provide insights.`
diff --git a/packages/sample/genaisrc/rss.genai.mjs b/packages/sample/genaisrc/rss.genai.mjs
index 1a0e612d56..9d688e70f0 100644
--- a/packages/sample/genaisrc/rss.genai.mjs
+++ b/packages/sample/genaisrc/rss.genai.mjs
@@ -8,10 +8,15 @@
 const { rss } = XML.parse(await (await fetch("https://dev.to/feed")).text())
 defData(
     "ARTICLES",
-    rss.channel.item.map(({ title, description }) => ({
-        title,
-        description: parsers.HTMLToText(description, {}).slice(0, 2000),
-    }))
+    await Promise.all(
+        rss.channel.item.map(async ({ title, description }) => ({
+            title,
+            description: (await parsers.HTMLToText(description, {})).slice(
+                0,
+                2000
+            ),
+        }))
+    )
 )
 $`
 - Summarize ARTICLES