#!/usr/bin/env ruby

# Add the path to your feedcache directory here (absolute path, no trailing slash)
FCPIPES_DIR = '/path/to/your/feedcache-pipes/plugin/directory'
# How many characters from each feed item do you want to display
CHAR_COUNT = 75
# Set to true (Ruby boolean, unquoted) if you want to receive error emails from the CRON job
CRON_EMAILS = false


#################################################################
#                                                               #
#  DO NOT EDIT BELOW THIS LINE                                  #
#                                                               #
#################################################################
# Put this script's own directory on the load path so 'lib/feedparser' resolves.
$LOAD_PATH << File.expand_path(File.dirname(__FILE__))

require 'net/http'
require 'lib/feedparser'
require 'uri'
require 'yaml'
require 'tempfile'

# Read master config settings.
# The master config is a single line of '~'-separated fields:
#   display count ~ format-text flag ('true'/other) ~ new-window flag ('true'/other)
CONFIG_FILE = "#{FCPIPES_DIR}/files/fcpipes-config.txt"
CACHE_FILE = "#{FCPIPES_DIR}/files/fcpipes-cache.txt"
MASTER_CONFIG = "#{FCPIPES_DIR}/master-config.txt"
File.open(MASTER_CONFIG, 'r') do |f|
  @params = f.gets.split('~')
end
# parse the parameters from the config file
@display_num  = @params[0].strip                                # item count (string; converted with to_i at use)
@format_text  = @params[1].strip == 'true'                      # comparison already yields a boolean
@link_target  = @params[2].strip == 'true' ? '_blank' : '_self' # anchor target attribute

# RSS formatting function.
# Truncate +txt+ to at most +limit+ characters (default CHAR_COUNT),
# breaking on the last space so no word is cut in half, and append '...'
# when a mid-word truncation was avoided. Text at or under the limit, or
# text with no usable space, is returned without an ellipsis.
#
# BUGFIX: the original called @text.slice!(0, n), which REMOVES the head
# of the string and keeps the trailing word fragment; we now keep the
# leading words up to the last space instead.
def shorten_text(txt, limit = CHAR_COUNT)
  return txt if txt.size <= limit

  text = "#{txt} ".slice(0, limit)
  # only word-break when there is a space and we did not land exactly on one
  if text.include?(' ') && text.slice(text.size - 1, 1) != ' '
    text = text.slice(0, text.rindex(' '))
    text << '...'
  end
  text
end

begin # read the config file settings: one feed URL per line
  @all_feeds = []
  File.open(CONFIG_FILE, 'r') do |f|
    f.each_line { |line| @all_feeds << line.strip }
  end
rescue => e
  # Best-effort: an unreadable config leaves @all_feeds empty and the
  # script continues; errors are only reported when CRON_EMAILS is set.
  if CRON_EMAILS
    puts "Error reading configuration file"
    puts YAML.dump(e)
  end
end

# Working state: a temp file to stage the rendered HTML, a flag recording
# whether rendering completed, and a timestamp-keyed index of feed items.
@tmp = Tempfile.new("fcpipes")
@processed = false
@entries = {}

# Fetch every configured feed over HTTP and index its items by timestamp.
# NOTE(review): items sharing the same date.to_i collapse to a single hash
# entry (last one wins) — confirm whether that is acceptable.
@all_feeds.each do |feed_url|
  begin
    body = Net::HTTP.get(URI.parse(feed_url))
    parsed = FeedParser::Feed.new(body)
    parsed.items.each { |item| @entries[item.date.to_i] = item }
  rescue => err
    # One bad feed must not abort the others; report only under CRON.
    if CRON_EMAILS
      puts "Error processing feed - #{feed_url}"
      puts YAML.dump(err)
    end
  end
end #--> @all_feeds.each
  
# Sort the collected items newest-first and render at most @display_num of
# them as an HTML <ul> into the temp file.
@count = 1
@html_text = "<ul class='fcpipes'>"
@entries.sort_by { |stamp, _| -stamp }.each do |stamp, item|
  break if @count > @display_num.to_i
  anchor = ''
  anchor << "<li class='fcpipes-item'><a href='#{item.link}' target='#{@link_target}'>"
  if @format_text
    # Title-case the item title, shorten it, then append the item date.
    titlecased = "#{item.title.downcase.gsub(/^[a-z]|\s+[a-z]/) {|m| m.upcase}}"
    anchor << (shorten_text(titlecased) + "#{item.date}")
  else
    anchor << "#{item.title}"
  end
  anchor << "</a></li>\n"
  @html_text << anchor
  @count += 1
end
@html_text << "</ul><br />\n"
@tmp << @html_text
@processed = true
@tmp.close

# if we had new feeds, move them to the cache file.
# Block form of File.open guarantees the cache handle is closed, and the
# ensure guarantees the temp file is removed, even if the write raises
# (the original left both behind on failure).
if @processed
  @tmp.open
  begin
    File.open(CACHE_FILE, "w") do |cache|
      cache << @tmp.gets(nil) # gets(nil) slurps the whole temp file
    end
  ensure
    @tmp.close(true) # remove the /tmp file
  end
end
