index.tsx

// Function to scrape a website using Node.js and Cheerio
const axios = require('axios');
const cheerio = require('cheerio');

async function scraper(url) {
  try {
    const response = await axios.get(url);
    const $ = cheerio.load(response.data);

    // Use jQuery selectors to extract data
    const title = $('title').text();
    const paragraph = $('p').text();

    return { title, paragraph };
  } catch (error) {
    throw new Error(`Error scraping website: ${error.message}`);
  }
}

// Example usage
const url = 'https://www.example.com';
scraper(url)
  .then(data => {
    console.log(data);
  })
  .catch(error => {
    console.error(error);
  });
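Note that $('p').text() concatenates the text of every paragraph on the page into one string. If you want each paragraph as a separate entry, Cheerio's .map() and .get() can return a plain array instead. A minimal sketch of that variation, assuming the same axios/cheerio setup as above (scrapeParagraphs is a hypothetical name, not part of the original gist):

// Sketch: collect each <p> element's text separately instead of one concatenated string
const axios = require('axios');
const cheerio = require('cheerio');

async function scrapeParagraphs(url) {
  const response = await axios.get(url);
  const $ = cheerio.load(response.data);

  // .map() iterates the matched elements; .get() converts the result to a plain array
  const paragraphs = $('p')
    .map((i, el) => $(el).text().trim())
    .get();

  return paragraphs;
}

// Example usage
scrapeParagraphs('https://www.example.com')
  .then(paragraphs => console.log(paragraphs))
  .catch(error => console.error(error.message));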