Several updates, README updated, new selectors fixed

drobnikj 2019-01-08 09:35:01 +01:00
parent 76fdde6dbd
commit 9f1ae7fca0
4 changed files with 3878 additions and 162 deletions

README.md — 3726 changed lines; file diff suppressed because it is too large.

enqueue_places_crawler.js (filename inferred from require('./enqueue_places_crawler') below)

@@ -9,6 +9,7 @@ const waitForGoogleMapLoader = (page) => page.waitFor(() => !document.querySelec
 const enqueueAllUrlsFromPagination = async (page, requestQueue, paginationFrom, maxPlacesPerCrawl) => {
     let results = await page.$$('.section-result');
     const resultsCount = results.length;
     for (let resultIndex = 0; resultIndex < resultsCount; resultIndex++) {
         // Need to get the results again, Puppeteer lost the context..
         await page.waitForSelector('.searchbox', { timeout: DEFAULT_TIMEOUT });
@@ -21,6 +22,7 @@ const enqueueAllUrlsFromPagination = async (page, requestQueue, paginationFrom,
         await link.click();
         await waitForGoogleMapLoader(page);
         await page.waitForSelector('.section-back-to-list-button', { timeout: DEFAULT_TIMEOUT });
+        // After redirection to the detail page, save the URL to the request queue to process later
         const url = page.url();
         await requestQueue.addRequest({ url, userData: { label: 'detail' } });
         log.info(`Added to queue ${url}`);
@@ -28,20 +30,23 @@ const enqueueAllUrlsFromPagination = async (page, requestQueue, paginationFrom,
             log.info(`Reached max places per crawl ${maxPlacesPerCrawl}, stopped enqueuing new places.`);
             break;
         }
         await page.click('.section-back-to-list-button');
     }
 };

 /**
- * Crawler add all place detail from listing to queue
+ * Adds all places from the listing to the queue
  * @param page
  * @param searchString
- * @param launchPuppeteerOptions
  * @param requestQueue
- * @param listingPagination
  * @param maxPlacesPerCrawl
  */
-const enqueueAllPlaceDetailsCrawler = async (page, searchString, launchPuppeteerOptions, requestQueue, listingPagination, maxPlacesPerCrawl) => {
+const enqueueAllPlaceDetails = async (page, searchString, requestQueue, maxPlacesPerCrawl) => {
+    // Save the state of the listing pagination
+    // NOTE: If the page function fails, the crawler skips the already scraped pagination
+    const listingPagination = await Apify.getValue(LISTING_PAGINATION_KEY) || {};
     await page.type('#searchboxinput', searchString);
     await sleep(5000);
     await page.click('#searchbox-searchbutton');
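Aside: this hunk moves the pagination-state bookkeeping into enqueueAllPlaceDetails itself. Progress is persisted in the key-value store under LISTING_PAGINATION_KEY, so an interrupted run can resume without re-enqueuing pages it already walked. A minimal sketch of the pattern, assuming a hypothetical processPage callback and a page size of 20 (neither is part of the commit):

```js
const Apify = require('apify');
const { LISTING_PAGINATION_KEY } = require('./consts');

// Hypothetical helper illustrating the resume pattern used by this commit.
const resumablePagination = async (processPage) => {
    // Restore previously saved state, or start fresh on the first run.
    const state = await Apify.getValue(LISTING_PAGINATION_KEY) || {};
    if (state.isFinish) return; // a previous run already walked the whole listing

    let from = state.to || 0;
    let hasMore = true;
    while (hasMore) {
        const to = from + 20;
        hasMore = await processPage(from, to);
        // Persist progress after every page so a crash loses at most one page of work.
        state.from = from;
        state.to = to;
        await Apify.setValue(LISTING_PAGINATION_KEY, state);
        from = to;
    }
    state.isFinish = true;
    await Apify.setValue(LISTING_PAGINATION_KEY, state);
};
```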
@@ -50,8 +55,9 @@ const enqueueAllPlaceDetailsCrawler = async (page, searchString, launchPuppeteer
     try {
         await page.waitForSelector('h1.section-hero-header-title');
     } catch (e) {
-        // It can happen, doesn't matter :)
+        // This can happen when there is a listing rather than just a detail page
     }
     // In case there is no listing, put just the detail page to the queue
     const maybeDetailPlace = await page.$('h1.section-hero-header-title');
     if (maybeDetailPlace) {
@@ -59,6 +65,8 @@ const enqueueAllPlaceDetailsCrawler = async (page, searchString, launchPuppeteer
         await requestQueue.addRequest({ url, userData: { label: 'detail' } });
         return;
     }
+    // In case there is a listing, go through all details, limited by maxPlacesPerCrawl
     const nextButtonSelector = '[jsaction="pane.paginationSection.nextPage"]';
     while (true) {
         await page.waitForSelector(nextButtonSelector, { timeout: DEFAULT_TIMEOUT });
@@ -71,7 +79,8 @@ const enqueueAllPlaceDetailsCrawler = async (page, searchString, launchPuppeteer
         } else {
             log.debug(`Added links from pagination ${from} - ${to}`);
             await enqueueAllUrlsFromPagination(page, requestQueue, from, maxPlacesPerCrawl);
-            listingPagination = { from, to };
+            listingPagination.from = from;
+            listingPagination.to = to;
             await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
         }
         await page.waitForSelector(nextButtonSelector, { timeout: DEFAULT_TIMEOUT });
@@ -87,6 +96,9 @@ const enqueueAllPlaceDetailsCrawler = async (page, searchString, launchPuppeteer
             await waitForGoogleMapLoader(page);
         }
     }
+    listingPagination.isFinish = true;
+    await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
 };

-module.exports = { run: enqueueAllPlaceDetailsCrawler };
+module.exports = { enqueueAllPlaceDetails };
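The module now exports the function under its own name instead of wrapping it in a generic run key, so callers destructure exactly what they need (as places_crawler.js does later in this commit):

```js
// Before: const crawler = require('./enqueue_places_crawler'); await crawler.run(page, ...);
// After (inside an async handler):
const { enqueueAllPlaceDetails } = require('./enqueue_places_crawler');
await enqueueAllPlaceDetails(page, searchString, requestQueue, maxPlacesPerCrawl);
```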

main.js (filename inferred from the Apify.main entry point)

@@ -26,7 +26,7 @@ Apify.main(async () => {
     const launchPuppeteerOptions = {};
     if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);

-    // Scrape all place detail links
+    // Create and run the crawler
     const crawler = placesCrawler.setUpCrawler(launchPuppeteerOptions, requestQueue, maxCrawledPlaces);
     await crawler.run();
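Most of main.js sits outside this hunk. A hedged sketch of how the pieces plausibly fit together — the INPUT field names and the start URL are assumptions, only the last four lines appear in this diff:

```js
const Apify = require('apify');
const placesCrawler = require('./places_crawler');

Apify.main(async () => {
    // Hypothetical input handling; field names are assumptions.
    const { searchString, proxyConfig, maxCrawledPlaces } = await Apify.getValue('INPUT');
    const requestQueue = await Apify.openRequestQueue();
    // The start URL is labeled 'startUrl' so handlePageFunction enqueues places first.
    await requestQueue.addRequest({
        url: 'https://www.google.com/maps',
        userData: { label: 'startUrl', searchString },
    });

    const launchPuppeteerOptions = {};
    if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);

    // Create and run the crawler
    const crawler = placesCrawler.setUpCrawler(launchPuppeteerOptions, requestQueue, maxCrawledPlaces);
    await crawler.run();
});
```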

places_crawler.js (filename inferred from the placesCrawler require in main.js)

@@ -1,59 +1,20 @@
 const Apify = require('apify');
 const { sleep, log } = Apify.utils;
-const infiniteScroll = require('./infinite_scroll');
 const { injectJQuery } = Apify.utils.puppeteer;
-const { MAX_PAGE_RETRIES, DEFAULT_TIMEOUT, LISTING_PAGINATION_KEY } = require('./consts');
-const enqueueAllPlaceDetailsCrawler = require('./enqueue_places_crawler');
+const infiniteScroll = require('./infinite_scroll');
+const { MAX_PAGE_RETRIES, DEFAULT_TIMEOUT } = require('./consts');
+const { enqueueAllPlaceDetails } = require('./enqueue_places_crawler');

 /**
- * Method to set up crawler to get all place details and save them to default dataset
- * @param launchPuppeteerOptions
- * @param requestQueue
- * @param maxCrawledPlaces
- * @return {Apify.PuppeteerCrawler}
+ * This is the worst part - parsing data from the place detail
+ * @param page
  */
-const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) => {
-    const crawlerOpts = {
-        launchPuppeteerOptions,
-        requestQueue,
-        maxRequestRetries: MAX_PAGE_RETRIES,
-        retireInstanceAfterRequestCount: 10,
-        handlePageTimeoutSecs: 2 * 3600, // Two hours because startUrl crawler
-        maxOpenPagesPerInstance: 1, // Because startUrl crawler crashes if we mixed tabs with details scraping
-        // maxConcurrency: 1,
-    };
-    if (maxCrawledPlaces) {
-        crawlerOpts.maxRequestsPerCrawl = maxCrawledPlaces + 1; // The first one is startUrl
-    }
-    return new Apify.PuppeteerCrawler({
-        ...crawlerOpts,
-        gotoFunction: async ({ request, page }) => {
-            await page._client.send('Emulation.clearDeviceMetricsOverride');
-            await page.goto(request.url, { timeout: 60000 });
-        },
-        handlePageFunction: async ({ request, page }) => {
-            const { label, searchString } = request.userData;
-            log.info(`Open ${request.url} with label: ${label}`);
-            await injectJQuery(page);
-            if (label === 'startUrl') {
-                // enqueue all places
-                log.info(`Start enqueuing place details for search: ${searchString}`);
-                // Store state of listing pagination
-                // NOTE: Ensured - If pageFunction failed crawler skipped already scraped pagination
-                const listingPagination = await Apify.getValue(LISTING_PAGINATION_KEY) || {};
-                await enqueueAllPlaceDetailsCrawler.run(page, searchString, launchPuppeteerOptions, requestQueue, listingPagination, maxCrawledPlaces);
-                listingPagination.isFinish = true;
-                await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
-            } else {
-                // Timeout because timeout for handle page is 2 hours
-                setTimeout(() => {
-                    throw new Error('HandlePagefunction timed out!');
-                }, 600000);
-                // Get data from review
+const extractPlaceDetail = async (page) => {
+    // Extracts basic information
     const titleSel = 'h1.section-hero-header-title';
     await page.waitForSelector(titleSel, { timeout: DEFAULT_TIMEOUT });
-    const placeDetail = await page.evaluate(() => {
+    const detail = await page.evaluate(() => {
         return {
             title: $('h1.section-hero-header-title').text().trim(),
             totalScore: $('span.section-star-display').eq(0).text().trim(),
@@ -62,17 +23,19 @@ const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) =>
             plusCode: $('[data-section-id="ol"] .widget-pane-link').text().trim(),
         };
     });
-    placeDetail.url = request.url;

+    // Extracts the histogram for popular times
     const histogramSel = '.section-popular-times';
     if (await page.$(histogramSel)) {
-        placeDetail.popularTimesHistogram = await page.evaluate(() => {
+        detail.popularTimesHistogram = await page.evaluate(() => {
             const graphs = {};
             const days = ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'];
-            // Days graphs
+            // Extract all days graphs
             $('.section-popular-times-graph').each(function(i) {
                 const day = days[i];
                 graphs[day] = [];
                 let graphStartFromHour;
+                // Finds where the x axis starts
                 $(this).find('.section-popular-times-label').each(function(labelIndex) {
                     if (graphStartFromHour) return;
                     const hourText = $(this).text().trim();
@@ -80,22 +43,28 @@ const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) =>
                         ? 12 + (parseInt(hourText) - labelIndex)
                         : parseInt(hourText) - labelIndex;
                 });
+                // Finds the values on the y axis
                 $(this).find('.section-popular-times-bar').each(function (barIndex) {
-                    const occupancy = $(this).attr('aria-label').match(/\d+\s{1,}%/)[0];
+                    const occupancyMatch = $(this).attr('aria-label').match(/\d+\s+?%/);
+                    if (occupancyMatch) {
                         const maybeHour = graphStartFromHour + barIndex;
                         graphs[day].push({
                             hour: maybeHour > 24 ? maybeHour - 24 : maybeHour,
-                            occupancy,
+                            occupancyPercent: parseInt(occupancyMatch[0]),
                         });
+                    }
                 });
             });
             return graphs;
         });
     }

-    placeDetail.reviews = [];
+    // Extracts reviews
+    detail.reviews = [];
     const reviewsButtonSel = 'button[jsaction="pane.reviewChart.moreReviews"]';
-    if (placeDetail.totalScore) {
-        placeDetail.reviewsCount = await page.evaluate((selector) => {
+    if (detail.totalScore) {
+        detail.totalScore = parseFloat(detail.totalScore.replace(',', '.'));
+        detail.reviewsCount = await page.evaluate((selector) => {
             const numberReviewsText = $(selector).text().trim();
             return (numberReviewsText) ? numberReviewsText.match(/\d+/)[0] : null;
         }, reviewsButtonSel);
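The popular-times fix above guards against bars whose aria-label carries no percentage: the old code indexed .match(...)[0] directly and threw a TypeError on a null match, while the new code checks the match first and stores a numeric occupancyPercent. A standalone illustration of the difference (the sample labels are assumptions; real strings vary by locale):

```js
// Sample aria-label values; the real strings vary by locale.
const labels = ['92 % busy at 6 PM.', 'Currently closed'];

for (const label of labels) {
    const occupancyMatch = label.match(/\d+\s+?%/);
    if (occupancyMatch) {
        // parseInt stops at the first non-digit, so '92 %' becomes the number 92.
        console.log(parseInt(occupancyMatch[0], 10)); // -> 92
    } else {
        console.log('no percentage in label, bar skipped'); // old code would have thrown here
    }
}
```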
@@ -111,12 +80,10 @@ const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) =>
         // Sort reviews by newest; one click sometimes didn't work :)
         try {
             const sortButtonEl = '.section-tab-info-stats-button-flex';
+            for (let i = 0; i < 3; i++) {
                 await page.click(sortButtonEl);
                 await sleep(1000);
-            await page.click(sortButtonEl);
-            await sleep(1000);
-            await page.click(sortButtonEl);
-            await sleep(5000);
+            }
             await page.click('.context-menu-entry[data-index="1"]');
         } catch (err) {
             // It can happen, it is not a big issue :)
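Collapsing the three copy-pasted click/sleep pairs into a loop keeps the workaround (the sort menu sometimes ignores the first click) in one place. A slightly more defensive variant, sketched here as an idea rather than the commit's code, would stop clicking as soon as the menu entry appears:

```js
const Apify = require('apify');
const { sleep } = Apify.utils;

// Hypothetical helper: click buttonSel until menuEntrySel is present, at most `attempts` times.
const clickUntilMenuOpens = async (page, buttonSel, menuEntrySel, attempts = 3) => {
    for (let i = 0; i < attempts; i++) {
        await page.click(buttonSel);
        await sleep(1000);
        if (await page.$(menuEntrySel)) return true; // menu is open, stop clicking
    }
    return false;
};

// Usage with the selectors from this diff:
// await clickUntilMenuOpens(page, '.section-tab-info-stats-button-flex', '.context-menu-entry[data-index="1"]');
```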
@@ -145,17 +112,18 @@ const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) =>
                 }
                 return reviewData;
             }, reviewEl);
-            placeDetail.reviews.push(review);
+            detail.reviews.push(review);
         }
         await page.click('button.section-header-back-button');
     }

+    // Extracts place images
     await page.waitForSelector(titleSel, { timeout: DEFAULT_TIMEOUT });
     const imagesButtonSel = '[jsaction="pane.imagepack.button"]';
-    console.log(imagesButtonSel);
     if (await page.$(imagesButtonSel)) {
         await page.click(imagesButtonSel);
         await infiniteScroll(page, 99999999999, '.section-scrollbox.section-listbox');
-        placeDetail.imageUrls = await page.evaluate(() => {
+        detail.imageUrls = await page.evaluate(() => {
             const urls = [];
             $('.gallery-image-high-res').each(function () {
                 const urlMatch = $(this).attr('style').match(/url\("(.*)"\)/);
@@ -167,9 +135,55 @@ const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) =>
             return urls;
         });
     }
-            await Apify.pushData(placeDetail);
+    return detail;
+};
+
+/**
+ * Method to set up the crawler to get all place details and save them to the default dataset
+ * @param launchPuppeteerOptions
+ * @param requestQueue
+ * @param maxCrawledPlaces
+ * @return {Apify.PuppeteerCrawler}
+ */
+const setUpCrawler = (launchPuppeteerOptions, requestQueue, maxCrawledPlaces) => {
+    const crawlerOpts = {
+        launchPuppeteerOptions,
+        requestQueue,
+        maxRequestRetries: MAX_PAGE_RETRIES,
+        retireInstanceAfterRequestCount: 10,
+        handlePageTimeoutSecs: 15 * 60, // 15 minutes because of startUrl enqueueing
+        maxOpenPagesPerInstance: 1, // Because startUrl enqueueing crashes if we mix tabs with details scraping
+    };
+    if (maxCrawledPlaces) {
+        crawlerOpts.maxRequestsPerCrawl = maxCrawledPlaces + 1; // The first one is the startUrl
+    }
+    if (!Apify.isAtHome()) {
+        crawlerOpts.maxConcurrency = 2;
+    }
+    return new Apify.PuppeteerCrawler({
+        ...crawlerOpts,
+        gotoFunction: async ({ request, page }) => {
+            await page._client.send('Emulation.clearDeviceMetricsOverride');
+            await page.goto(request.url, { timeout: 60000 });
+        },
+        handlePageFunction: async ({ request, page }) => {
+            const { label, searchString } = request.userData;
+            log.info(`Open ${request.url} with label: ${label}`);
+            await injectJQuery(page);
+            if (label === 'startUrl') {
+                log.info(`Start enqueuing places details for search: ${searchString}`);
+                await enqueueAllPlaceDetails(page, searchString, requestQueue, maxCrawledPlaces);
+                log.info('Enqueuing places finished!');
+            } else {
+                // Get data for the place and save it to the dataset
+                log.info(`Extracting details from place url ${request.url}`);
+                const placeDetail = await extractPlaceDetail(page);
+                placeDetail.url = request.url;
+                await Apify.pushData(placeDetail);
+                log.info(`Finished place url ${request.url}`);
             }
-            log.info('Finished', request.url);
         },
         handleFailedRequestFunction: async ({ request }) => {
             // This function is called when crawling of a request failed too many times
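The diff is cut off inside handleFailedRequestFunction, so its body is not shown here. For orientation, a common Apify SDK pattern for such a handler (illustrative only, not necessarily what this commit contains) records the failure so it is visible in the dataset:

```js
// Hypothetical body for the truncated handler above.
handleFailedRequestFunction: async ({ request }) => {
    // This function is called when crawling of a request failed too many times.
    await Apify.pushData({
        url: request.url,
        succeeded: false,
        errors: request.errorMessages, // error messages collected by the SDK across retries
    });
},
```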