# samples/dynamic.coffee — CasperJS sample: crawl pages dynamically, collecting links as it goes
1 2
casper = require("casper").create
    verbose: true
3

4
# The base links array
5
links = [
6 7 8
    "http://google.com/"
    "http://yahoo.com/"
    "http://bing.com/"
9 10
]

11 12 13 14
currentLink = 0;

# If we don't set a limit, it could go on forever
upTo = ~~casper.cli.get(0) || 10
15

16 17 18 19
###
Get the links, and add them to the links array
(It could be done all in one step, but it is intentionally splitted)
###
20 21 22 23 24 25
addLinks = (link) ->
    @then ->
        found = @evaluate searchLinks
        @echo "#{found.length} links found on #{link}"
        links = links.concat found

26 27 28 29 30 31 32 33 34 35 36
###
Fetch all <a> elements from the page and return
the ones which contains a href starting with 'http://'
###
searchLinks = ->
    filter = Array::filter
    map = Array::map
    map.call filter.call(document.querySelectorAll("a"), (a) ->
        (/^http:\/\/.*/i).test a.getAttribute("href")
    ), (a) ->
        a.getAttribute "href"
37

38 39 40 41
# Just opens the page and prints the title
start = (link) ->
    @start link, ->
        @echo "Page title: #{ @getTitle() }"
42

43
# As long as it has a next link, and is under the maximum limit, will keep running
44 45 46 47 48 49 50 51
check = ->
    if links[currentLink] && currentLink < upTo
        @echo "--- Link #{currentLink} ---"
        start.call @, links[currentLink]
        addLinks.call @, links[currentLink]
        currentLink++
        @run check
    else
52
        @echo "All done."
53 54
        @exit()

55 56 57 58 59
casper.start()

casper.then ->
    @echo "Starting"

60
casper.run check