config.ini
[aws]
; credentials from your AWS security credentials page. These will be used
; for all SQS queues, EC2 instances, and S3 buckets created
access_key_id =
secret_access_key =
; the region everything will run in; keep it the same as the region Common
; Crawl's datasets are hosted in for best performance
region = us-east-1
[sqs]
; the name of the queue to be created to store WAT paths (must not already exist)
queue =
; file containing all WAT paths on S3, one per line. Find and extract the
; latest from Common Crawl's website
path_file = wat.paths
; the number of WAT paths to be used. Set to the number of lines in path_file
; to use all of Common Crawl's WAT data
path_cap =
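; illustrative: path_cap = 1000 would limit the run to 1000 WAT paths from
; path_file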
[ec2]
; the AMI all EC2 instances will use
ami = ami-1ecae776
; the default username of the AMI
user_name = ec2-user
; the type of instance all EC2 instances will be set to
type = m3.xlarge
; the volume device and size in GB that will be created for each EC2 instance.
; set the device to the default root volume device of your AMI to override it
; instead of creating a second volume
volume_device = /dev/xvda
volume_size = 11
; the name and local path of the key pair to be used for all EC2 instances
key_name =
key_path =
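; illustrative (hypothetical names): for a key pair created as "crawler-key"
; and downloaded locally, this would look like
;   key_name = crawler-key
;   key_path = ~/.ssh/crawler-key.pem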
; the security group to be used for all EC2 instances. The only requirement is
; that it allows inbound SSH connections (TCP port 22)
security_group = ssh
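; if the group doesn't exist yet, a rough sketch of creating it with the AWS
; CLI (assuming the group name above):
;   aws ec2 create-security-group --group-name ssh --description "SSH only"
;   aws ec2 authorize-security-group-ingress --group-name ssh --protocol tcp --port 22 --cidr 0.0.0.0/0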
; the number of EC2 instances to use. Adding instances speeds up the process
; but costs a bit more because of the volumes created; check the AWS pricing
; calculator.
instances =
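; illustrative: instances = 4 launches four workers of the type above, each
; with its own volume as configured in this section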
[s3]
; the bucket to save result files to (URLs with scripts matching the regex).
; It must already exist, and its name should not contain periods or you may
; get false warnings
results_bucket =
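; illustrative (hypothetical name): results_bucket = commoncrawl-scrape-results
; (hyphens are fine; periods are what triggers the false warnings)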
[commoncrawl]
; prepended to all URIs in path_file to form the full URL
base = http://aws-publicdatasets.s3.amazonaws.com/
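; illustrative: a path_file line such as common-crawl/crawl-data/.../x.warc.wat.gz
; (exact layout depends on the crawl) would be fetched from
; http://aws-publicdatasets.s3.amazonaws.com/common-crawl/crawl-data/.../x.warc.wat.gz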
[scraper]
; the number of scraper threads to be created in each EC2 instance
thread_count = 5
; the regex to match script URLs against. NOTE: the script URL must FULLY
; match the regex, so it's usually a good idea to surround your regex with .*
regex =
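; illustrative (hypothetical pattern): to flag script URLs from a particular
; host, something like
;   regex = .*cdn\.example\.com.*
; would do, with the surrounding .* ensuring the full URL still matches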