// Fetch a site's robots.txt as an array of lines (false on failure).
function get_robots_txt($url)
{
    $parsed = parse_url($url);
    // Reuse the scheme of the given URL instead of hardcoding http,
    // falling back to plain http when none is present.
    $scheme = isset($parsed['scheme']) ? $parsed['scheme'] : 'http';
    $robotstxt = @file("{$scheme}://{$parsed['host']}/robots.txt");
    return $robotstxt;
}
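
// Note: file() keeps blank lines and trailing newlines, which is why the
// sample output further down contains empty entries. If a cleaner array is
// wanted, file() accepts flags for exactly that. The variant below is only
// a sketch; get_robots_txt_clean is a name introduced here, not part of the
// original snippet.
function get_robots_txt_clean($url)
{
    $parsed = parse_url($url);
    $scheme = isset($parsed['scheme']) ? $parsed['scheme'] : 'http';
    return @file(
        "{$scheme}://{$parsed['host']}/robots.txt",
        FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES
    );
}
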
$arr = get_robots_txt('https://www.website.com');
echo "<pre>";
print_r($arr);
echo "</pre>";
/*
run:
Array
(
[0] =>
[1] =>
[2] => Sitemap: http://www.website.com/sitemap.xml
[3] =>
[4] => User-Agent: AB12381bot
[5] => Disallow: /
[6] =>
[7] => User-agent: *
[8] => Disallow: /aboutSomething/
[9] => Disallow: /admin/
...
)
*/
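
// The raw line array can also be turned into something easier to query.
// The sketch below is an assumption for illustration (parse_robots_lines is
// a name introduced here): it groups Disallow rules by User-agent and
// ignores every other directive for brevity.
function parse_robots_lines(array $lines)
{
    $rules = array();
    $agent = null;
    foreach ($lines as $line) {
        $line = trim($line);
        if ($line === '' || $line[0] === '#') {
            continue; // skip blank lines and comments
        }
        if (stripos($line, 'User-agent:') === 0) {
            $agent = trim(substr($line, strlen('User-agent:')));
            $rules[$agent] = array();
        } elseif (stripos($line, 'Disallow:') === 0 && $agent !== null) {
            $rules[$agent][] = trim(substr($line, strlen('Disallow:')));
        }
    }
    return $rules;
}

// Example use with the $arr fetched above:
if (is_array($arr)) {
    print_r(parse_robots_lines($arr));
}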