nginx禁止垃圾蜘蛛访问

/www/server/nginx/conf文件夹下建立agent_deny.conf文件
然后在 nginx 配置文件的 server 段内加入 include agent_deny.conf;（见下面示例）

server
{
    include agent_deny.conf;
    listen 80;
# Block common scraping/HTTP-client tools by User-Agent.
# `~*` is a case-insensitive regex match, so "scrapy", "CURL", etc. are
# all caught. Matching requests get a plain 403 response.
if ($http_user_agent ~* (Scrapy|Curl|HttpClient)) {
     return 403;
}

# Block a blacklist of known bad/spam crawler User-Agents, plus empty
# User-Agent headers (the trailing `^$` alternative matches an empty value).
# NOTE: `~` is a CASE-SENSITIVE regex match — e.g. "semrushbot" in lower
# case would NOT be caught; switch to `~*` if case-insensitive matching is
# desired (that would change behavior, so it is left as-is here).
# Fix applied: the original pattern was broken across two lines in the
# middle of "Microsoft URL Control", which is invalid nginx syntax and
# would never match that agent; the pattern is now one unbroken line.
if ($http_user_agent ~ "opensiteexplorer|BLEXBot|MauiBot|SemrushBot|DotBot|WinHttp|WebZIP|FetchURL|node-superagent|java/|yisouspider|FeedDemon|Jullo|JikeSpider|Indy Library|Alexa Toolbar|AskTbFXTV|AhrefsBot|CrawlDaddy|Java|Feedly|Apache-HttpAsyncClient|UniversalFeedParser|ApacheBench|Microsoft URL Control|Swiftbot|ZmEu|oBot|jaunty|Python-urllib|lightDeckReports Bot|YYSpider|DigExt|HttpClient|MJ12bot|heritrix|EasouSpider|Ezooms|BOT/0.1|YandexBot|FlightDeckReports|Linguee Bot|^$") {
    return 403;
}

# Reject any request whose method is not GET, HEAD, or POST
# (e.g. OPTIONS, PUT, DELETE, or raw scanner probes). `!~` is a
# case-sensitive negated regex match; $request_method is always
# upper-case, so anchoring with ^...$ here is exact.
if ($request_method !~ ^(GET|HEAD|POST)$) {
    return 403;
}

你可能感兴趣的:(nginx禁止垃圾蜘蛛访问)