forked from paxed/Rodney
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: redditrss.pm
More file actions
136 lines (105 loc) · 3.02 KB
/
redditrss.pm
File metadata and controls
136 lines (105 loc) · 3.02 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
package RedditRSS;
use strict;
use warnings;
use diagnostics;
use HTTP::Date;
# Construct a RedditRSS object.  Any key/value pairs passed in become
# the object's initial fields (e.g. rss_url => ..., cachedir => ...).
sub new {
    my ($class, %opts) = @_;
    my $self = \%opts;
    return bless $self, $class;
}
# Initialize feed state: derive a flat cache filename from the feed URL,
# normalize the cache directory, and load the cached timestamp.
# Arguments (both optional, falling back to fields set via new()):
#   $rss_url  - feed URL to poll
#   $cachedir - directory holding the per-feed cache file
sub init {
    my $self = shift;
    my $rss_url = shift || $self->{'rss_url'};
    my $cachedir = shift || $self->{'cachedir'};
    # Cache filename = URL minus the scheme, with '/' turned into '.'.
    # Accept both http and https (the original only stripped http://,
    # so https URLs leaked "https:" into the filename).
    $self->{'cachefile'} = $rss_url;
    $self->{'cachefile'} =~ s{^https?://}{};
    $self->{'cachefile'} =~ tr{/}{.};
    $self->{'cachedir'} = $cachedir if (!$self->{'cachedir'});
    $self->{'cachedir'} = '.' if (!$self->{'cachedir'});
    # Ensure a trailing slash so dir and file can be simply concatenated.
    $self->{'cachedir'} .= '/' if ($self->{'cachedir'} !~ m{/$});
    $self->{'rss_url'} = $rss_url if (!$self->{'rss_url'});
    # 0 = "never seen anything"; cache_read() overwrites it if a cache exists.
    $self->{'cachedate'} = 0;
    $self->cache_read();
}
# Load the cached timestamp from the per-feed cache file.
# A missing cache file is not an error: the default cachedate is kept.
# The file normally holds a single epoch timestamp; if it somehow has
# several lines, the last one wins (same as the original loop).
sub cache_read {
    my $self = shift;
    my $path = $self->{'cachedir'} . $self->{'cachefile'};
    # 3-arg open with a lexical handle; the old 2-arg bareword form was
    # vulnerable to mode injection via the path.
    open(my $fh, '<', $path) or return;
    while (my $line = <$fh>) {
        $line =~ s/\n+$//;
        $self->{'cachedate'} = $line;
    }
    close($fh);
}
# Persist the current cachedate to the per-feed cache file so the next
# run only reports items newer than this timestamp.
# Silently returns if the file cannot be opened (best-effort cache).
sub cache_write {
    my $self = shift;
    my $line = $self->{'cachedate'};
    my $path = $self->{'cachedir'} . $self->{'cachefile'};
    # 3-arg open with a lexical handle instead of the old 2-arg
    # bareword form; check close() so buffered write errors surface.
    open(my $fh, '>', $path) or return;
    print {$fh} "$line\n";
    close($fh) or warn "close $path: $!";
}
# Extract one XML element ($part, e.g. 'title' or 'pubDate') from $data,
# store its text content in %$itemref under the element name, and return
# $data with that element removed.  If the element is absent, $data is
# returned unchanged and no key is set.
sub parse_reddit_rss_itempart {
    my ($part, $data, $itemref) = @_;
    # /s lets . cross newlines: real feed text (titles, descriptions)
    # can span lines, which the original pattern silently missed.
    if ($data =~ m/<\Q$part\E>(.+?)<\/\Q$part\E>/s) {
        my $value = $1;
        $data =~ s/<\Q$part\E>.+?<\/\Q$part\E>//s;
        $itemref->{$part} = $value;
    }
    return $data;
}
# Parse a reddit RSS document string into a list of hashrefs, one per
# <item>, each with any of: title, link, pubDate, description.
# Returns the (possibly empty) list of items in document order.
sub parse_reddit_rss {
    my $data = shift;
    my @itemlist = ();
    # /s is required: the feed is multi-line text, so <item> bodies
    # contain newlines and the original /s-less match found nothing.
    while ($data =~ m/<item>(.+?)<\/item>/s) {
        my $itemdata = $1;
        my %item = ();
        $itemdata = parse_reddit_rss_itempart('title', $itemdata, \%item);
        $itemdata = parse_reddit_rss_itempart('link', $itemdata, \%item);
        $itemdata = parse_reddit_rss_itempart('pubDate', $itemdata, \%item);
        $itemdata = parse_reddit_rss_itempart('description', $itemdata, \%item);
        push(@itemlist, \%item);
        $data =~ s/<item>.+?<\/item>//s;
    }
    return @itemlist;
}
# Fetch the feed and return an announcement string for the first item
# newer than the cached timestamp, updating cachedate to that item's
# time.  Returns undef when the fetch fails or nothing new is found.
sub update_rss {
    my $self = shift;
    my $retstr;

    # List-form pipe open runs wget without a shell, so metacharacters
    # in the URL cannot be injected (the old backtick form interpolated
    # the URL straight into a shell command line).
    open(my $wget, '-|', '/usr/bin/wget', '--timeout=20', '--quiet',
         '-O', '-', $self->{'rss_url'}) or return $retstr;
    my $rssdata = do { local $/; <$wget> };   # slurp entire response
    close($wget);
    return $retstr if (!defined($rssdata) || $rssdata eq '');

    my @items = parse_reddit_rss($rssdata);
    foreach my $item (@items) {
        # str2time() returns undef for unparsable/missing dates; such
        # items are skipped (the original numified undef to 0, with a
        # warning, and skipped them too).
        my $itemtime = str2time($item->{pubDate});
        next if (!defined($itemtime) || $itemtime <= $self->{'cachedate'});

        $retstr = 'Reddit: '.$item->{title}.' ';
        # Prefer the short http://redd.it/<id> form when the link looks
        # like a reddit comments URL; otherwise use the link verbatim.
        my $url = 'http://redd.it/';
        if ($item->{link} =~ m/^.+?\/comments\/([a-z0-9]+)\//) {
            $url .= $1;
        } else {
            $url = $item->{link};
        }
        $retstr .= $url;
        $self->{'cachedate'} = $itemtime;
        # Report only the first (newest-seen) fresh item per call.
        return $retstr;
    }
    return $retstr;
}
1;
__END__
# slurp the whole file, not each line.
#undef $/;
#my $infile = 'nethack.rss';
#open INFILE, $infile or die "Could not open $infile: $!";
#my $f = <INFILE>;
#close INFILE;
#my @i = parse_reddit_rss($f);
#my $nitems = scalar(@i);
#print $i[0]->{pubDate}."\n";
#my $t = str2time($i[0]->{pubDate});
#print $t."\n";
#print time2str($t)."\n";