#
# robots.txt
#
# This file is used to prevent the crawling and indexing of certain parts
# of your site by web crawlers and spiders run by sites like Yahoo!
# and Google. By telling these "robots" where not to go on your site,
# you save bandwidth and server resources.
#
# This file will be ignored unless it is at the root of your host:
# Used: http://example.com/robots.txt
# Ignored: http://example.com/site/robots.txt
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/robotstxt.html

User-agent: *
Crawl-delay: 10

# CSS, JS, Images
# Allow:

# Directories
Disallow: /soc/
Disallow: /static/
Disallow: /website/

# Files
Disallow: /run.sh
Disallow: /tornado_main.py
Disallow: /instances.py
Disallow: /requirements.txt

# Paths (clean URLs)
Disallow: /admin/
Disallow: /get_code/
Disallow: /get_books/
Disallow: /get_chapters/
Disallow: /get_examples/
Disallow: /get_contributor/
Disallow: /get_bug_form/
Disallow: /get_bug_form_submit/
Disallow: /get_diff/
Disallow: /get_revisions/
Disallow: /submit-revision/
Disallow: /search_book/
Disallow: /search_book/popular/
Disallow: /search_book/recent/
Disallow: /get_subcategories/
Disallow: /get_node/
Disallow: /get_submit_revision_form/
Disallow: /get_submit_revision_form_submit/
Disallow: /review/get_review_revision/
Disallow: /review/get_push_revision/
Disallow: /review/get_remove_revision/
Disallow: /review/