vcrss.rb
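# Downloads new items from the RSS/Atom feeds listed in config.yml using
# youtube-dl (or another configurable binary), and keeps a small per-feed log
# under log/ so repeated runs only fetch items newer than the last download.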
require 'digest'
require 'rss'
require 'time'
require 'yaml'
# Create the necessary directories unless they already exist
Dir.mkdir('downloads') unless File.exist?('downloads')
Dir.mkdir('log') unless File.exist?('log')
# Load the YAML config file
CONFIG = YAML.load_file('config.yml')
# Bail out if the config does not define any feeds
unless CONFIG['feeds']
  puts 'Invalid config file. Please see README.md for an example.'
  exit 1
end
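# For reference, the keys read below correspond to a config.yml roughly like
# the sketch that follows. The URLs, filter strings and the yt-dlp/tee values
# are placeholders only; filters, binary, options and pipes are all optional,
# and configured options replace the defaults entirely:
#
#   feeds:
#     - url: https://example.com/feed.xml
#       filters:
#         - 'some title keyword'
#       binary: yt-dlp
#       options: "--all-subs -o 'downloads/%(title)s-%(id)s.%(ext)s'"
#       pipes: "| tee -a log/downloads.txt"
#     - url: https://example.com/another-feed.xml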
# Loop over the configured feeds
CONFIG['feeds'].each_with_index do |feed, i|
  # Create a unique ID for each feed based on its URL and its position in the list
  feed_md5 = Digest::MD5.hexdigest("#{feed['url']}#{i}")
  # Create the log file on the first run, storing the setup time as an integer
  # timestamp so the file can be reloaded with YAML.load_file on any Ruby version
  unless File.file?("log/feed-#{feed_md5}.log")
    data = { 'log_setup' => Time.now.to_i }
    File.open("log/feed-#{feed_md5}.log", 'w') { |f| f.write(data.to_yaml) }
  end
  # Load the log file for this feed
  log = YAML.load_file("log/feed-#{feed_md5}.log")
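  # For reference, a previously written log file looks roughly like this,
  # where <md5> stands for the feed's hex digest (values are placeholders):
  #
  #   <md5>_feed: https://example.com/feed.xml
  #   <md5>_updated: 1700000000
  #   <md5>_date: 1699990000
  #   <md5>_link: https://example.com/item
  #   <md5>_title: Example item title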
  # Publication date of the last downloaded item (0 if nothing has been downloaded yet)
  log_date = log["#{feed_md5}_date"].to_i
  # Link of the last downloaded item ('' if nothing has been downloaded yet)
  log_link = log["#{feed_md5}_link"] || ''
  # Fetch and parse the feed (validation disabled)
  rss = RSS::Parser.parse(feed['url'], false)
  # Iterate in reverse so the newest item is processed last
  rss.items.reverse_each do |item|
    download = true
    # Check the feed type and read the title, date and link accordingly
    if rss.feed_type == 'rss'
      item_title = item.title
      item_date = item.pubDate.to_time.to_i
      item_link = item.link
    elsif rss.feed_type == 'atom'
      item_title = item.title.content
      item_date = item.published.content.to_time.to_i
      item_link = item.link.href
    end
    # If filters are configured, only download items whose title contains one of them
    if feed['filters']
      download = false
      feed['filters'].each do |filter|
        if item_title.include?(filter.to_s)
          download = true
        end
      end
      next unless download
    end
    # Skip items that are not newer than the last downloaded item
    next if item_date <= log_date
    # Time to download?
    if download
      puts "Downloading #{item_title}…"
      # Binary to run (youtube-dl by default, overridable per feed)
      binary = feed['binary'] || 'youtube-dl'
      # Command-line options (note the leading space for concatenation below)
      options = " --all-subs -o 'downloads/%(title)s-%(id)s.%(ext)s'"
      options = " #{feed['options']}" if feed['options']
      # Optional shell pipes appended after the link
      pipes = feed['pipes'] ? " #{feed['pipes']}" : ''
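      # With the default binary and options, the system call below runs roughly:
      #   youtube-dl --all-subs -o 'downloads/%(title)s-%(id)s.%(ext)s' https://example.com/item
      # (the URL is a placeholder; configured pipes are appended after the link)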
      # Run the download task…
      system "#{binary}#{options} #{item_link}#{pipes}"
      # Update the log with this item so the next run starts after it
      log_data = {
        "#{feed_md5}_feed" => feed['url'],
        "#{feed_md5}_updated" => Time.now.to_i,
        "#{feed_md5}_date" => item_date,
        "#{feed_md5}_link" => item_link.to_s,
        "#{feed_md5}_title" => item_title.to_s
      }
      File.open("log/feed-#{feed_md5}.log", 'w') { |f| f.write(log_data.to_yaml) }
    end
  end
end