github.com robots.txt details

saved: size: 1059 bytes, md5 checksum: ed5b19f892174be1cf227fe2b4147488
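
To verify that a local copy of this snapshot is intact, recompute the digest and compare it with the checksum above. A minimal Python sketch; the local filename robots.txt is an assumption about where the copy was saved:

    import hashlib

    EXPECTED = "ed5b19f892174be1cf227fe2b4147488"

    # "robots.txt" is a hypothetical local path for the saved snapshot.
    with open("robots.txt", "rb") as f:
        actual = hashlib.md5(f.read()).hexdigest()

    print("intact" if actual == EXPECTED else "corrupted or modified")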

content


# If you would like to crawl GitHub contact us at support@github.com.
# We also provide an extensive API: https://developer.github.com/

User-agent: baidu
crawl-delay: 1


User-agent: *

Disallow: */pulse
Disallow: */tree/
Disallow: */blob/
Disallow: */wiki/
Disallow: /gist/
Disallow: */forks
Disallow: */stars
Disallow: */download
Disallow: */revisions
Disallow: */issues/new
Disallow: */issues/search
Disallow: */commits/
Disallow: */commits/*?author
Disallow: */commits/*?path
Disallow: */branches
Disallow: */tags
Disallow: */contributors
Disallow: */comments
Disallow: */stargazers
Disallow: */archive/
Disallow: */blame/
Disallow: */watchers
Disallow: */network
Disallow: */graphs
Disallow: */raw/
Disallow: */compare/
Disallow: */cache/
Disallow: /.git/
Disallow: */.git/
Disallow: /*.git$
Disallow: /search/advanced
Disallow: /search
Disallow: */search
Disallow: /*q=
Disallow: /*.atom

Disallow: /ekansa/Open-Context-Data
Disallow: /ekansa/opencontext-*
Disallow: */tarball/
Disallow: */zipball/

Disallow: /account-login
Disallow: /Explodingstuff/
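
The baidu stanza asks Baidu's crawler to wait one second between requests, and the wildcard-free rules such as Disallow: /gist/ can be checked with Python's standard urllib.robotparser. A minimal sketch against an excerpt of the file above; note that robotparser matches paths by simple prefix and ignores the * and $ wildcards most of these rules rely on (a matcher for those follows in the next sketch):

    from urllib.robotparser import RobotFileParser

    # Excerpt of the snapshot above; parse() accepts the full file as well.
    SNAPSHOT = [
        "User-agent: baidu",
        "crawl-delay: 1",
        "",
        "User-agent: *",
        "Disallow: /gist/",
    ]

    rp = RobotFileParser()
    rp.parse(SNAPSHOT)

    print(rp.crawl_delay("baidu"))     # 1: one second between requests
    print(rp.crawl_delay("OtherBot"))  # None: no delay set for other agents
    print(rp.can_fetch("OtherBot", "https://github.com/gist/123"))  # False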
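
Most of the Disallow lines above rely on the * wildcard and the $ end-of-URL anchor, extensions popularized by Google's robots.txt handling and honored by the major crawlers but not by the stdlib parser. A minimal sketch of how a wildcard-aware crawler could evaluate such patterns; the rule subset and example paths are illustrative:

    import re

    def rule_to_regex(pattern):
        # '$' at the end of a rule anchors the match at the end of the path;
        # '*' matches any run of characters; rules match from the start of the path.
        anchored = pattern.endswith("$")
        body = pattern[:-1] if anchored else pattern
        regex = re.escape(body).replace(r"\*", ".*")
        return re.compile(regex + ("$" if anchored else ""))

    # A subset of the Disallow rules from the file above.
    DISALLOW = ["*/pulse", "*/tree/", "/gist/", "/*.git$", "/*q="]

    def is_allowed(path):
        # This file has no Allow lines, so any matching Disallow blocks the
        # path (full parsers apply longest-match precedence between the two).
        return not any(rule_to_regex(r).match(path) for r in DISALLOW)

    print(is_allowed("/rails/rails"))            # True: repository roots stay crawlable
    print(is_allowed("/rails/rails/tree/main"))  # False: matches */tree/
    print(is_allowed("/rails/rails.git"))        # False: matches /*.git$

Under these rules a repository's landing page remains indexable while file views, archives, commit histories, and search results are kept out of crawlers.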