小兔网

# encoding: utf-8
#
# Scrape rdoc.info's "recently updated libraries" list, fetch each project's
# README header in parallel threads, and export the interesting ones as an
# RSS 2.0 feed (RubyFeeds.rss).
require 'thread'
require 'nokogiri'
require 'open-uri'
require 'rss/maker'

# Thread-safe queue collecting [index, name, readme_url, last_modified, text]
# tuples produced by the worker threads.
$result = Queue.new

# Fetch one project's rdoc.info page, follow its README frame, and push a
# result tuple onto $result when the README text passes the filter.
#
# no   - Integer position of the project in the listing.
# name - String project name.
# url  - String absolute URL of the project's rdoc.info page.
#
# Any error (network, parse, missing frame) is reported to stdout and
# swallowed so one bad project does not kill its worker thread.
def extract_readme_header(no, name, url)
  # NOTE: Kernel#open on URLs was removed in Ruby 3; URI.open is the
  # supported open-uri entry point (available since Ruby 2.5).
  frame = Nokogiri::HTML(URI.open(url))
  return unless frame

  # The rdoc.info project page is a frameset; the second frame is the README.
  readme = $url + frame.css('frame')[1]['src']
  return unless readme

  URI.open(readme) do |f|
    doc = Nokogiri::HTML(f.read)
    # First five paragraphs of the rendered README, joined into one line.
    text = doc.css("div#content div#filecontents p")[0..4]
              .map { |c| c.content }
              .join(" ")
              .strip
    return if text.length == 0

    # Keep only projects whose header does NOT mention Rails/Active*.
    if text !~ /(rails)|(activ_)/i
      puts "========= #{no} #{name} : #{text[0..50]}"
      date = f.last_modified
      $result << [no, name, readme, date, text]
    end
  end
rescue
  # BUG FIX: original read `$!.to_send`, a NoMethodError that crashed the
  # rescue clause itself; print the exception message instead.
  puts $!.to_s
end

# Build an RSS 2.0 document from the sorted result tuples.
#
# items - Array of [no, name, url, date, descr] tuples.
#
# Returns the RSS::Rss object (serialized via #to_s when written to file).
def make_rss(items)
  RSS::Maker.make("2.0") do |m|
    # BUG FIX: title read "GtitHub" in the original.
    m.channel.title = "GitHub recently updated projects"
    m.channel.link = "http://localhost"
    m.channel.description = "GitHub recently updated projects"
    m.items.do_sort = true
    items.each do |no, name, url, date, descr|
      i = m.items.new_item
      i.title = name
      i.link = url
      i.description = descr
      i.date = date
    end
  end
end

############################## M A I N ########################

############# Scan list of recent projects
lth = []
$url = "http://rdoc.info"
puts "get url #{$url}..."
doc = Nokogiri::HTML(URI.open($url))
doc.css('ul.libraries')[1].css('li').each_with_index do |li, i|
  aname = li.css('a').first
  name = aname.content
  purl = $url + aname['href']
  # Pass the loop values as thread arguments so each worker gets its own
  # copies instead of closing over the shared loop variables.
  lth << Thread.new(i, name, purl) { |j, n, u| extract_readme_header(j, n, u) }
end

################ Wait until all READMEs have been read
lth.each { |th| th.join }

################ Dequeue results and sort them by date descending
result = []
result << $result.shift while $result.size > 0
# BUG FIX: the comment promised "date descending" but the code sorted by
# listing index ascending (a[0] <=> b[0]). Sort on the last-modified field
# (index 3), newest first; a nil date (server sent no header) sorts last.
result.sort! { |a, b| (b[3] || Time.at(0)) <=> (a[3] || Time.at(0)) }

################ Format results as RSS
File.open("RubyFeeds.rss", "w") do |file|
  file.write make_rss(result)
end

以上就是使用Ruby和Nokogiri模拟爬虫导出RSS种子的实例详解的知识。速戳>>知识兔学习精品课!