Changed how we add place details to queue

JakubDrobnik 2018-11-20 16:21:22 +01:00
parent cd4e552685
commit 9ab8642b28
3 changed files with 84 additions and 77 deletions

View File

@@ -14,7 +14,7 @@
"proxyConfig": {
"title": "Proxy configuration",
"type": "object",
"description": "Use Apify Proxy, you need to have some proxy group to results on Google.",
"description": "Use Apify Proxy, you need to have some proxy group to get results on Google.",
"prefill": { "useApifyProxy": true },
"editor": "proxy"
},

View File

@@ -1,5 +1,5 @@
# Crawler Google Places
This crawler search string on input on [google maps](https://www.google.com/maps) and returns all information about found places.
The crawler searches [google maps](https://www.google.com/maps) for the input search string and returns all available information about the places it finds.
## How to use through API
To learn how to use the Actor from the Apify UI, see the [actor detail page](https://www.apify.com/drobnikj/crawler-google-places).

View File

@@ -16,6 +16,7 @@ const enqueueAllUrlsFromPagination = async (page, requestQueue) => {
const resultsCount = results.length;
for (let resultIndex = 0; resultIndex < resultsCount; resultIndex++) {
// Need to get results again, Puppeteer lost the context.
await page.waitForSelector('.section-result');
results = await page.$$('.section-result');
const link = await results[resultIndex].$('h3');
await link.click();
@@ -46,10 +47,9 @@ Apify.main(async () => {
startUrl = 'https://www.google.com/maps/search/';
}
console.log('Start url is ', startUrl);
console.log('Start url is', startUrl);
const requestQueue = await Apify.openRequestQueue();
await requestQueue.addRequest({ url: startUrl, userData: { label: 'startUrl' } });
// Store state of listing pagination
// NOTE: This ensures that if the pageFunction fails, the crawler skips pagination pages that were already scraped
@@ -63,16 +63,12 @@ Apify.main(async () => {
};
if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);
const crawler = new Apify.PuppeteerCrawler({
launchPuppeteerOptions,
requestQueue,
handlePageTimeoutSecs: 1800, // We are adding all links to queue on startUrl
handlePageFunction: async ({ request, page }) => {
const { label } = request.userData;
console.log(`Open ${request.url} with label: ${label}`);
if (label === 'startUrl') {
// Enqueue all urls for place detail
// Enqueue all links to scrape from listings
if (!listingPagination.isFinish) {
console.log(`Start enqueuing place details for search: ${searchString}`);
const browser = await Apify.launchPuppeteer(launchPuppeteerOptions);
const page = await browser.newPage();
await page.goto(startUrl);
await page.type('#searchboxinput', searchString);
await sleep(5000);
await page.click('#searchbox-searchbutton');
@@ -100,7 +96,18 @@ Apify.main(async () => {
}
await sleep(5000);
}
} else {
listingPagination.isFinish = true;
await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
}
// Scrape all place detail links
const crawler = new Apify.PuppeteerCrawler({
launchPuppeteerOptions,
requestQueue,
handlePageTimeoutSecs: 1800, // We are adding all links to queue on startUrl
handlePageFunction: async ({ request, page }) => {
const { label } = request.userData;
console.log(`Open ${request.url} with label: ${label}`);
// Get data from review
await injectJQuery(page);
await page.waitForSelector('h1.section-hero-header-title', { timeout: DEFAULT_TIMEOUT });
@@ -146,7 +153,7 @@ Apify.main(async () => {
}
}
await Apify.pushData(placeDetail);
}
console.log(request.url, 'Done');
},
});