I'm attempting to scrape websites with dynamic paging using Nightmare / Electron. I don't see a way to perform a do...until loop with Nightmare functions, or a way to chain evaluate calls with logic in between.
Here's a simple code example that merely Googles a phrase and returns the result hrefs from page 1. I'd like this code to continue for each page in the results.
var Nightmare = require('nightmare');
var vo = require('vo');
vo(function* () {
  var nightmare = Nightmare({ show: true });
  var links = yield nightmare
    .goto('http://www.google.com')
    .wait('input[title="Search"]')
    .click('input[title="Search"]')
    .type('input[title="Search"]', 'Anequim Project')
    .click('input[name="btnK"]')
    .wait(600)
    .evaluate(function(){
        // collect the href of every result link on the current page
        var linkArray = [];
        var links = document.querySelectorAll('h3.r a');
        for (var i = 0; i < links.length; ++i) {
            linkArray.push(links[i].getAttribute('href'));
        }
        return linkArray;
    });
  yield nightmare.end();
  return links;
})(function (err, result) {
  if (err) return console.log(err);
  console.log(result);
});
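What I was missing is that the do...until does not have to live inside the Nightmare call chain at all: when the generator is driven by vo, each yield hands its resolved value back to plain JavaScript, so an ordinary loop can wrap the yielded Nightmare calls. Below is a rough sketch of that mechanic; the URL and the '.item a' / '.next' selectors are placeholders for an imaginary paging site, not something taken from a real page.
var Nightmare = require('nightmare');
var vo = require('vo');
vo(function* () {
    // NOTE: the URL and selectors below are placeholders, not a real paging site
    var nightmare = Nightmare({ show: true });
    var items = [];
    var nextExists;
    yield nightmare.goto('http://example.com');
    do {
        // each yield resolves before the next line runs, so plain
        // JavaScript control flow can sit between Nightmare calls
        items = items.concat(yield nightmare.evaluate(function () {
            var anchors = document.querySelectorAll('.item a');
            var hrefs = [];
            for (var i = 0; i < anchors.length; ++i) {
                hrefs.push(anchors[i].href);
            }
            return hrefs;
        }));
        nextExists = yield nightmare.visible('.next');
        if (nextExists) {
            yield nightmare.click('.next').wait(1000);
        }
    } while (nextExists);
    yield nightmare.end();
    return items;
})(function (err, result) {
    if (err) return console.log(err);
    console.log(result);
});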
The following code example is a modified version of a solution provided by rosshinkley of the segmentio/nightmare project. It still needs some work, as it is not 100% reliable in my tests so far with Nightmare version 2.1.2, but it is an excellent starting point.
Note: when testing, if you run it more than X times, Google will require a captcha.
var Nightmare = require('nightmare');
var vo = require('vo');
vo(run)(function(err, result) {
    if (err) throw err;
});
function* run() {
    var nightmare = Nightmare({ show: true }), 
        MAX_PAGE = 100, 
        currentPage = 0, 
        nextExists = true, 
        links = []; 
    yield nightmare 
        .goto('http://www.google.com')
        .wait('input[title="Search"]')
        .click('input[title="Search"]')
        .type('input[title="Search"]', 'Anequim Project')
        .click('input[name="btnK"]') 
        .wait(2000);
    nextExists = yield nightmare.visible('#pnnext'); 
    while (nextExists && currentPage < MAX_PAGE) { 
        links = links.concat(yield nightmare
            .evaluate(function() {
                // collect the href of every result link on the current page
                var linkArray = [];
                var links = document.querySelectorAll('h3.r a');
                for (var i = 0; i < links.length; ++i) {
                    linkArray.push(links[i].href);
                }
                return linkArray;
            }));
        yield nightmare 
            .click('#pnnext')
            .wait(2000);
        currentPage++; 
        nextExists = yield nightmare.visible('#pnnext'); 
    } 
    console.dir(links); 
    yield nightmare.end(); 
} 
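As a follow-up, the same loop can also be driven with async/await instead of vo and generators, because a Nightmare chain can be awaited like a promise. This is only a sketch of the same logic under that assumption (it needs a Node version with async/await support), and I have not tested it against the Google markup above:
var Nightmare = require('nightmare');
async function run() {
    var nightmare = Nightmare({ show: true });
    var MAX_PAGE = 100;
    var currentPage = 0;
    var links = [];
    await nightmare
        .goto('http://www.google.com')
        .wait('input[title="Search"]')
        .click('input[title="Search"]')
        .type('input[title="Search"]', 'Anequim Project')
        .click('input[name="btnK"]')
        .wait(2000);
    var nextExists = await nightmare.visible('#pnnext');
    while (nextExists && currentPage < MAX_PAGE) {
        // same scrape as above, driven by await instead of yield
        links = links.concat(await nightmare.evaluate(function () {
            var linkArray = [];
            var anchors = document.querySelectorAll('h3.r a');
            for (var i = 0; i < anchors.length; ++i) {
                linkArray.push(anchors[i].href);
            }
            return linkArray;
        }));
        await nightmare.click('#pnnext').wait(2000);
        currentPage++;
        nextExists = await nightmare.visible('#pnnext');
    }
    console.dir(links);
    await nightmare.end();
}
run().catch(function (err) { console.log(err); });
The control flow is identical to the generator version; only the driver changes.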