-- src/crawl/crawl.conf

local dbg = require( "crawl/debugger" )

-- global settings

crawler = {
	-- stop after N operations (0: no limit)
	stop_after_N_operations = 0,
	
	module_path = "modules",
	
	modules_search_recursive = true
}

logger = {
	level = "DEBUG"
}
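
-- The set of valid log levels is defined by the logger module; the
-- log.debug( ) and log.notice( ) calls below imply at least "DEBUG" and
-- "NOTICE" exist. Other syslog-style levels are an assumption.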

modules = {
	urlnormalizers = {
		"mod_urlnormalizer_simple",
		"mod_urlnormalizer_googleurl"
	},
	
	urlfilters = {
		"mod_urlfilter_host",
		"mod_urlfilter_protocol"
	},

	urlchainfilters = {
		"mod_urlfilter_chain"
	},
	
	urlfrontiers = {
		"mod_frontier_memory"
	},
	
	fetchers = {
		"mod_fetcher_libfetch",
		"mod_fetcher_libcurl",
		"mod_fetcher_winhttp"
	},
	
	urlseens = {
		"mod_urlseen_memory"
	},
	
	dedupers = {
		"mod_deduper_null"
	},
	
	processors = {
		"mod_processor_htmllinkextract",
		"mod_processor_robotstxt",
		"mod_processor_sitemap"
	},
	
	typedetects = {
		"mod_typedetect_libmagic"
	}
}
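
-- The module names above are resolved against crawler.module_path
-- ("modules"); with modules_search_recursive = true the whole directory
-- tree is searched. A hypothetical on-disk layout (file names and the
-- shared-object extension are assumptions, not taken from this repository):
--
--   modules/urlnormalizer/mod_urlnormalizer_simple.so
--   modules/fetcher/mod_fetcher_libcurl.so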

-- seeds: URLs fed to the URL frontier at the start of the crawl

seeds = {
	"http://www.wolframe.net",
	"http://wolframe.net"
}

filters = {
	-- protocols allowed when fetching URLs
	protocols = {
		"http",
		"https"
	},
	
	-- hosts (FQDNs) allowed to be crawled
	hosts = {
		"www.wolframe.net",
		"wolframe.net"
	}
}
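
-- Presumable effect of the filters above (a sketch, assuming the two filter
-- modules match on exact protocol and FQDN):
--
--   http://wolframe.net/docs   -> passes both filters
--   ftp://wolframe.net/file    -> rejected by mod_urlfilter_protocol
--   http://blog.wolframe.net/  -> rejected by mod_urlfilter_host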


function init( )
	log.notice( "Init.." )
	-- normalizer = urlnormalizers.create( "google_urlnormalizer" );
	normalizer = GoogleURLNormalizer:new( )
	-- normalizer2 = urlnormalizers.create( "simple_urlnormalizer" );
	normalizer2 = SimpleURLNormalizer:new( )
	-- cast to the common base class URLNormalizer to use it polymorphically
	base = tolua.cast( normalizer, "URLNormalizer" )
	log.debug( "type: " .. tolua.type( base ) )
end

function destroy( )
	log.notice( "Destroy.." )
	-- delete both normalizers created in init( )
	normalizer:delete( )
	normalizer2:delete( )
end

function crawl( )
	--dbg( ) -- uncomment to break into the debugger required at the top
	log.notice( "Crawling.." )
	local baseUrl = base:parseUrl( "http://www.base.com" )
	log.debug( "base URL is: " .. baseUrl:str( ) )
	local url = base:normalize( baseUrl, "/relativedir/relativefile.html" )
	log.debug( "URL is: " .. url:str( ) )
end
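
-- A minimal sketch of reusing the base-class interface from crawl( ) with
-- either normalizer (assuming, as the tolua.cast in init( ) suggests, that
-- both classes derive from URLNormalizer; the helper name is hypothetical):
--
-- function normalizeWith( n, baseStr, rel )
-- 	local b = tolua.cast( n, "URLNormalizer" )
-- 	local u = b:normalize( b:parseUrl( baseStr ), rel )
-- 	return u:str( )
-- end
--
-- normalizeWith( normalizer2, "http://www.base.com", "/relativedir/relativefile.html" )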