First, create a folder named robots (the spider log files will be written into it).
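If you prefer to create the folder from code rather than by hand, here is a minimal sketch; it is not part of the original post, it simply uses the standard FileSystemObject FolderExists/CreateFolder calls against the same "robots" path the logging code expects:

<%
Dim FsoInit, RobotsDir
RobotsDir = Server.MapPath("robots")
Set FsoInit = Server.CreateObject("Scripting.FileSystemObject")
' Create the robots folder on first run if it does not exist yet
If Not FsoInit.FolderExists(RobotsDir) Then FsoInit.CreateFolder RobotsDir
Set FsoInit = Nothing
%>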
<%
Sub robot()
    ' Each entry is "user-agent substring@log-file label", entries separated by "|"
    Dim robots : robots = "Baiduspider+@Baidu|Googlebot@Google|ia_archiver@Alexa|IAArchiver@Alexa|ASPSeek@ASPSeek|YahooSeeker@Yahoo|sohu-search@Sohu|help.yahoo.com/help/us/ysearch/slurp@Yahoo|MSNBOT@MSN"
    Dim I1, I2, l1, l2, l3, i, F1, FilePath, Fso, Fout

    l2 = False                                          ' becomes True when a known spider matches
    l1 = Request.ServerVariables("HTTP_USER_AGENT")     ' visitor's user agent
    F1 = Request.ServerVariables("SCRIPT_NAME")         ' page being crawled
    I1 = Split(robots, Chr(124))                        ' Chr(124) is "|"

    ' Look for a known spider signature in the user agent
    For i = 0 To UBound(I1)
        I2 = Split(I1(i), "@")
        If InStr(LCase(l1), LCase(I2(0))) > 0 Then
            l2 = True : l3 = I2(1) : Exit For
        End If
    Next

    If l2 And Len(l3) > 0 Then  ' it is a crawler, so append to that spider's log
        FilePath = Server.MapPath("robots/" & l3 & "_robots.txt")
        ' Record this crawl (8 = ForAppending, True = create the file if missing)
        Set Fso = Server.CreateObject("Scripting.FileSystemObject")
        Set Fout = Fso.OpenTextFile(FilePath, 8, True)
        Fout.WriteLine "Indexed page: " & F1
        Fout.WriteLine "Spider: " & l3 & Chr(32) & Chr(32) & "Time: " & Now()
        Fout.WriteLine "-----------------------------------------------"
        Fout.Close
        Set Fout = Nothing
        Set Fso = Nothing
    End If
End Sub
%>
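To use it, save the Sub in an include file and call robot() near the top of each page you want to track. The file name robot.asp below is only an example, not something the original post specifies:

<!--#include file="robot.asp"-->
<%
' Log this request if the visitor is a known search-engine spider
Call robot()
%>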