# /robots.txt for http://our.murdoch.edu.au/
# Last modified, 15/02/2017, Trevor Phillips
#
# use # to suggest html files be skipped on a per file basis

User-Agent: funnelback
Disallow:

User-agent: gsa-crawler
User-agent: OmtrBot/1.0
Allow: /

User-agent: OpenSearchServer_Bot
Allow: /

User-agent: *
Allow: /Bookshop/
Allow: /Student-life/Finances/
Disallow: /