public abstract class BaseRobotRules extends Object implements Serializable
Modifier and Type | Field and Description |
---|---|
static long | UNSET_CRAWL_DELAY |
Constructor and Description |
---|
BaseRobotRules() |
Modifier and Type | Method and Description |
---|---|
void | addSitemap(String sitemap) — Adds a sitemap URL to the rules if it is not a duplicate. |
boolean |
equals(Object obj) |
long |
getCrawlDelay() |
List<String> | getSitemaps() — Gets the URLs of sitemap links found in the robots.txt file. |
int |
hashCode() |
abstract boolean |
isAllowAll() |
abstract boolean |
isAllowed(String url) |
abstract boolean |
isAllowNone() |
boolean |
isDeferVisits() |
void |
setCrawlDelay(long crawlDelay) |
void |
setDeferVisits(boolean deferVisits) |
String | toString() — Returns a string with the crawl delay as well as a list of sitemaps, if any exist (and there are no more than 10 of them). |
public static final long UNSET_CRAWL_DELAY
public abstract boolean isAllowed(String url)
public abstract boolean isAllowAll()
public abstract boolean isAllowNone()
public long getCrawlDelay()
public void setCrawlDelay(long crawlDelay)
public boolean isDeferVisits()
public void setDeferVisits(boolean deferVisits)
public void addSitemap(String sitemap)
Copyright © 2009–2021 Crawler-Commons. All rights reserved.