mirror of https://github.com/davidjohnbarton/crawler-google-places.git
synced 2025-12-12 16:38:45 +00:00

Changed how we add place details to queue

This commit is contained in:
parent cd4e552685
commit 9ab8642b28
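What the hunks below add up to: before this commit, the start URL was pushed into the request queue and the listing pagination was walked inside the crawler's handlePageFunction under the 'startUrl' label; after it, the listing walk runs up front in a plain Puppeteer page, and the PuppeteerCrawler is left to process only place-detail requests. A minimal sketch of the new top-level flow, reconstructed from the diff (names are taken from the hunks; the getValue call, the key string, browser.close(), and crawler.run() are assumptions not shown in the diff):

    const Apify = require('apify');

    Apify.main(async () => {
        const startUrl = 'https://www.google.com/maps/search/';
        const launchPuppeteerOptions = {};                   // built from input earlier in main.js
        const LISTING_PAGINATION_KEY = 'listing-pagination'; // assumed key name
        const listingPagination = (await Apify.getValue(LISTING_PAGINATION_KEY)) || {};
        const requestQueue = await Apify.openRequestQueue();

        // Step 1 (new): walk the listing in a throwaway Puppeteer page and
        // enqueue every place-detail URL before any crawler exists.
        if (!listingPagination.isFinish) {
            const browser = await Apify.launchPuppeteer(launchPuppeteerOptions);
            const page = await browser.newPage();
            await page.goto(startUrl);
            // ...search, paginate, addRequest for each place (see hunks below)...
            listingPagination.isFinish = true;
            await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
            await browser.close();
        }

        // Step 2: the crawler now only opens the enqueued place-detail pages.
        const crawler = new Apify.PuppeteerCrawler({
            launchPuppeteerOptions,
            requestQueue,
            handlePageFunction: async ({ request, page }) => { /* scrape one place */ },
        });
        await crawler.run(); // not in the diff, but needed to start the crawl
    });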
@@ -14,7 +14,7 @@
     "proxyConfig": {
         "title": "Proxy configuration",
         "type": "object",
-        "description": "Use Apify Proxy, you need to have some proxy group to results on Google.",
+        "description": "Use Apify Proxy, you need to have some proxy group to get results on Google.",
         "prefill": { "useApifyProxy": true },
         "editor": "proxy"
     },
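For context, a src/main.js hunk further down merges this input field straight into the Puppeteer launch options with `if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);`. A hedged sketch of that hand-off, assuming the prefilled shape above (the apifyProxyGroups field and base options are illustrative assumptions, not in the schema excerpt):

    // Input as prefilled by the schema above.
    const proxyConfig = { useApifyProxy: true, apifyProxyGroups: ['SOME_GROUP'] };

    const launchPuppeteerOptions = { headless: true };   // assumed base options
    if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);
    // Apify.launchPuppeteer() understands useApifyProxy and routes the
    // browser through Apify Proxy, which the description says is needed
    // to get results on Google.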
@@ -1,5 +1,5 @@
 # Crawler Google Places
-This crawler search string on input on [google maps](https://www.google.com/maps) and returns all information about found places.
+Crawler search string on input on [google maps](https://www.google.com/maps) and returns all information about found places.
 
 ## How to use through API
 How to use Actor from Apify UI see [actor detail page](https://www.apify.com/drobnikj/crawler-google-places).
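The README delegates UI usage to the actor detail page; for the API route the heading promises, one common way to run this actor programmatically with the Apify SDK of this era is Apify.call, which starts another actor and waits for it to finish. A sketch, assuming the actor ID from the link above and an input matching the schema (the searchString value is invented for illustration):

    const Apify = require('apify');

    Apify.main(async () => {
        const run = await Apify.call('drobnikj/crawler-google-places', {
            searchString: 'coffee Prague',          // illustrative only
            proxyConfig: { useApifyProxy: true },
        });
        console.log('Finished with status:', run.status);
    });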
src/main.js (35 changed lines)
@@ -16,6 +16,7 @@ const enqueueAllUrlsFromPagination = async (page, requestQueue) => {
     const resultsCount = results.length;
     for (let resultIndex = 0; resultIndex < resultsCount; resultIndex++) {
         // Need to get results again, pupptr lost context..
+        await page.waitForSelector('.section-result');
         results = await page.$$('.section-result');
         const link = await results[resultIndex].$('h3');
         await link.click();
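The single added line here guards the re-query on the next line. Clicking a result navigates the page, which invalidates every previously fetched element handle (the "pupptr lost context" the comment complains about), so each loop pass must wait for the list to be back and fetch fresh handles before indexing into them. The pattern in isolation (the wrapper name and the back-navigation are assumptions; the diff shows only the click):

    // Walks the visible result list; `page` is a Puppeteer page already
    // showing Google Maps search results.
    const clickThroughResults = async (page) => {
        let results = await page.$$('.section-result');
        const resultsCount = results.length;
        for (let resultIndex = 0; resultIndex < resultsCount; resultIndex++) {
            await page.waitForSelector('.section-result'); // the line this commit adds
            results = await page.$$('.section-result');    // fresh handles; old ones are stale
            const link = await results[resultIndex].$('h3');
            await link.click();
            // ...scrape or enqueue the opened place...
            await page.goBack();                           // assumed; not shown in the diff
        }
    };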
@@ -46,10 +47,9 @@ Apify.main(async () => {
         startUrl = 'https://www.google.com/maps/search/';
     }
 
-    console.log('Start url is ', startUrl);
+    console.log('Start url is', startUrl);
 
     const requestQueue = await Apify.openRequestQueue();
-    await requestQueue.addRequest({ url: startUrl, userData: { label: 'startUrl' } });
 
     // Store state of listing pagination
     // NOTE: Ensured - If pageFunction failed crawler skipped already scraped pagination
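Dropping this addRequest call means nothing enters the queue with label 'startUrl' any more; the queue is now fed only with place-detail requests, presumably from inside enqueueAllUrlsFromPagination, whose body the diff does not show. If so, the enqueue would reuse the same request shape with a different label, something like (both the 'detail' label and the URL source are assumptions):

    // Hypothetical enqueue step inside enqueueAllUrlsFromPagination: the diff
    // shows the function receives requestQueue but not how it uses it.
    const enqueueOpenedPlace = async (page, requestQueue) => {
        const placeUrl = page.url(); // assumed: URL of the currently opened place detail
        await requestQueue.addRequest({
            url: placeUrl,
            userData: { label: 'detail' }, // hypothetical; only 'startUrl' ever appears in the diff
        });
    };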
@@ -63,16 +63,12 @@ Apify.main(async () => {
     };
     if (proxyConfig) Object.assign(launchPuppeteerOptions, proxyConfig);
 
-    const crawler = new Apify.PuppeteerCrawler({
-        launchPuppeteerOptions,
-        requestQueue,
-        handlePageTimeoutSecs: 1800, // We are adding all links to queue on startUrl
-        handlePageFunction: async ({ request, page }) => {
-            const { label } = request.userData;
-            console.log(`Open ${request.url} with label: ${label}`);
-
-            if (label === 'startUrl') {
-                // Enqueue all urls for place detail
+    // Enqueue all links to scrape from listings
+    if (!listingPagination.isFinish) {
+        console.log(`Start enqueuing place details for search: ${searchString}`);
+        const browser = await Apify.launchPuppeteer(launchPuppeteerOptions);
+        const page = await browser.newPage();
+        await page.goto(startUrl);
         await page.type('#searchboxinput', searchString);
         await sleep(5000);
         await page.click('#searchbox-searchbutton');
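Taken together, the plus side of this hunk replaces the crawler's 'startUrl' branch with a plain, crawler-free Puppeteer session that performs the search itself. Assembled into one self-contained unit (the wrapper function name is hypothetical, and the sleep helper is imported elsewhere in main.js; here it is inlined as an assumption):

    const Apify = require('apify');

    const sleep = millis => new Promise(resolve => setTimeout(resolve, millis)); // assumed helper

    const startSearch = async (startUrl, searchString, launchPuppeteerOptions) => {
        const browser = await Apify.launchPuppeteer(launchPuppeteerOptions);
        const page = await browser.newPage();
        await page.goto(startUrl);
        await page.type('#searchboxinput', searchString); // fill the Maps search box
        await sleep(5000);                                // crude wait for the page to settle
        await page.click('#searchbox-searchbutton');      // submit the search
        return { browser, page };                         // pagination continues in the next hunk
    };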
@@ -100,7 +96,18 @@ Apify.main(async () => {
             }
             await sleep(5000);
         }
-        } else {
+        listingPagination.isFinish = true;
+        await Apify.setValue(LISTING_PAGINATION_KEY, listingPagination);
+    }
+
+    // Scrape all place detail links
+    const crawler = new Apify.PuppeteerCrawler({
+        launchPuppeteerOptions,
+        requestQueue,
+        handlePageTimeoutSecs: 1800, // We are adding all links to queue on startUrl
+        handlePageFunction: async ({ request, page }) => {
+            const { label } = request.userData;
+            console.log(`Open ${request.url} with label: ${label}`);
             // Get data from review
             await injectJQuery(page);
             await page.waitForSelector('h1.section-hero-header-title', { timeout: DEFAULT_TIMEOUT });
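With the enqueue moved out, the PuppeteerCrawler reappears here almost verbatim, minus the label branch: every request it sees is now a place detail. One small leftover worth noting: the comment on handlePageTimeoutSecs still says links are added "on startUrl", which this commit makes stale. A sketch of the resulting shape (injectJQuery and DEFAULT_TIMEOUT are names from the diff and are stubbed here; the scraping body is elided):

    const Apify = require('apify');

    const DEFAULT_TIMEOUT = 60 * 1000;       // assumed value; defined elsewhere in main.js
    const injectJQuery = async (page) => {}; // stub; the real helper lives in the codebase

    // Assumes launchPuppeteerOptions and requestQueue exist as in main.js.
    const crawler = new Apify.PuppeteerCrawler({
        launchPuppeteerOptions,
        requestQueue,
        handlePageTimeoutSecs: 1800, // comment in the diff is now stale; links are enqueued up front
        handlePageFunction: async ({ request, page }) => {
            const { label } = request.userData;
            console.log(`Open ${request.url} with label: ${label}`);
            await injectJQuery(page);
            await page.waitForSelector('h1.section-hero-header-title', { timeout: DEFAULT_TIMEOUT });
            // ...build placeDetail and await Apify.pushData(placeDetail)...
            console.log(request.url, 'Done');
        },
    });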
@@ -146,7 +153,7 @@ Apify.main(async () => {
                 }
             }
             await Apify.pushData(placeDetail);
-            }
+
             console.log(request.url, 'Done');
         },
     });