@inproceedings{8de91269588c4a0889a437aa557d6ceb,
  author    = {Sun, Yang and Councill, Isaac G. and Giles, C. Lee},
  title     = {{BotSeer}: An Automated Information System for Analyzing {Web} Robots},
  booktitle = {Proceedings - 8th International Conference on Web Engineering, {ICWE} 2008},
  year      = {2008},
  pages     = {108--114},
  publisher = {IEEE},
  doi       = {10.1109/ICWE.2008.27},
  isbn      = {9780769532615},
  language  = {English (US)},
  abstract  = {Robots.txt files are vital to the web since they are supposed to regulate what search engines can and cannot crawl. We present BotSeer, a Web-based information system and search tool that provides resources and services for researching Web robots and trends in Robot Exclusion Protocol deployment and adherence. BotSeer currently indexes and analyzes 2.2 million robots.txt files obtained from 13.2 million websites, as well as a large Web server log of real-world robot behavior and related analyses. BotSeer provides three major services including robots.txt searching, robot bias analysis, and robot-generated log analysis. BotSeer serves as a resource for studying the regulation and behavior of Web robots as well as a tool to inform the creation of effective robots.txt files and crawler implementations.},
  note      = {8th International Conference on Web Engineering, ICWE 2008 ; Conference date: 14-07-2008 Through 18-07-2008},
}