#!/usr/bin/tclsh
############################################################################
#
# FeedCheck fetcher v0.7 by ccr/TNSP <ccr@tnsp.org>
# (C) Copyright 2008-2010 Tecnic Software productions (TNSP)
#
# This script is freely distributable under GNU GPL (version 2) license.
#
############################################################################

# Datafile, MUST be set to same as in feeds.tcl
set datafile "/home/niinuska/bot/data.feeds"

# Use a HTTP proxy? 1 = yes, 0 = no
set http_proxy 0

# HTTP proxy address and port
set http_proxy_host "cache.inet.fi"
set http_proxy_port 800


##############################################################################
# Entity -> replacement translation table used by convert_ent, stored as a
# flat key/value list suitable for [string map].
# NOTE(review): the entity names ("&nbsp;", "&auml;", ...) had been
# HTML-decoded by whatever corrupted this file, leaving useless identity
# pairs like " "->" "; restored here from the replacement characters that
# survived (order: nbsp, raquo, quot, a/o umlauts, amp, lt, gt, numeric
# umlaut references).
set html_ent [split "&nbsp;| |&raquo;|>>|&quot;|\"|&auml;|ä|&ouml;|ö|&Auml;|Ä|&Ouml;|Ö|&amp;|&|&lt;|<|&gt;|>|&#228;|ä|&#246;|ö|&#196;|Ä" "|"]

package require http
::http::config -urlencoding iso8859-1 -useragent "Mozilla/4.0 (compatible; MSIE 6.0; MSIE 5.5; Windows NT 5.0) Opera 9.5"
if {$http_proxy != 0} {
	::http::config -proxyhost $http_proxy_host -proxyport $http_proxy_port
}
|
|
proc convert_ent {udata} {
	# Decode HTML entities in udata using the global html_ent
	# translation table (a flat from/to list for [string map]).
	return [string map $::html_ent $udata]
}
|
|
36
|
|
37
|
|
proc add_entry {uname uprefix uurl udesc} {
	# Register one feed item in the global entries array.
	#
	# uname   - feed display name
	# uprefix - URL prefix prepended to the (entity-decoded) item URL
	# uurl    - item URL, may contain HTML entities
	# udesc   - item title/description, may contain HTML entities
	#
	# The full URL acts as the unique key: isentries($key) is flagged so the
	# writer can tell which stored entries were seen this run, and a new
	# entries($key) record [timestamp name url desc] is created only when
	# the key was not already known, bumping the new-entry counter.
	global entries isentries newurls currclock
	set utest "$uprefix[convert_ent $uurl]"
	set isentries($utest) 1
	# Idiomatic existence check; previously probed with catch {set ...}.
	if {![info exists entries($utest)]} {
		set entries($utest) [list $currclock $uname $utest [convert_ent $udesc]]
		incr newurls
	}
}
|
|
47
|
|
48
|
|
proc add_rss_feed {datauri dataname dataprefix} {
	# Fetch an RSS feed and register every <item> via add_entry.
	#
	# datauri    - feed URL
	# dataname   - feed display name passed through to add_entry
	# dataprefix - URL prefix passed through to add_entry
	#
	# Returns 1 if the HTTP request failed (after printing an error),
	# 0 otherwise (including the "no items matched" case).
	if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
		puts "Error getting $datauri: $uerrmsg"
		return 1
	}
	set upage [::http::data $utoken]
	::http::cleanup $utoken

	# Item-extraction patterns, tried in order until one yields matches
	# (previously three copy-pasted stanzas): CDATA-wrapped titles first,
	# then plain titles, then <item> tags carrying attributes. Each
	# pattern captures (title, link).
	set upatterns [list \
		{<item>.*?<title><..CDATA.(.*?)\]\]></title>.*?<link>(http.*?)</link>.*?</item>} \
		{<item>.*?<title>(.*?)</title>.*?<link>(http.*?)</link>.*?</item>} \
		{<item [^>]*>.*?<title>(.*?)</title>.*?<link>(http.*?)</link>.*?</item>}]

	foreach upattern $upatterns {
		set umatches [regexp -all -nocase -inline -- $upattern $upage]
		if {[llength $umatches] > 0} {
			# Match list is flat [full, title, link, full, title, link, ...];
			# add_entry takes the link as URL and the title as description.
			foreach {umatch utitle ulink} $umatches {
				add_entry $dataname $dataprefix $ulink $utitle
			}
			break
		}
	}
	return 0
}
|
|
80
|
|
81
|
|
##############################################################################
### Load the previously stored entries from the datafile
set oldurls 0
set newurls 0
if {![catch {set ufile [open $datafile r 0600]} uerrmsg]} {
	# Each stored record is one line of four "½"-separated fields:
	# timestamp, feed name, full URL (the key), description.
	while {[gets $ufile uline] >= 0} {
		set urec [split $uline "½"]
		if {[llength $urec] == 4} {
			set entries([lindex $urec 2]) $urec
			set isentries([lindex $urec 2]) 0
			incr oldurls
		}
	}
	close $ufile
}
set currclock [clock seconds]
|
|
100
|
|
##############################################################################
### Fetch and parse Halla-aho's articles (plain HTML page, no feed)
set datauri "http://www.halla-aho.com/scripta/";
set dataname "Mestari"
if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
	puts "Error getting $datauri: $uerrmsg"
} else {
	set upage [::http::data $utoken]
	::http::cleanup $utoken

	# Bolded links: capture (href, text); match list is flat triplets.
	foreach {umatch uhref utext} [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+\.html)\"><b>(\[^<\]+)</b>" $upage] {
		add_entry $dataname $datauri $uhref $utext
	}

	# Non-bolded links (text not starting a <b> element).
	foreach {umatch uhref utext} [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+\.html)\">(\[^<\]\[^b\]\[^<\]+)</a>" $upage] {
		add_entry $dataname $datauri $uhref $utext
	}
}
|
|
123
|
|
124
|
|
### The Adventurers
set datauri "http://www.peldor.com/chapters/index_sidebar.html";
set dataname "The Adventurers"
if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
	puts "Error getting $datauri: $uerrmsg"
} else {
	set upage [::http::data $utoken]
	::http::cleanup $utoken

	# Capture (href, text) of every link; hrefs are site-relative, so the
	# site root is used as the add_entry URL prefix.
	foreach {umatch uhref utext} [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage] {
		add_entry $dataname "http://www.peldor.com/" $uhref $utext
	}
}
|
|
140
|
|
141
|
|
### Order of the Stick
set datauri "http://www.giantitp.com/comics/oots.html";
set dataname "OOTS"
if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
	puts "Error getting $datauri: $uerrmsg"
} else {
	set upage [::http::data $utoken]
	::http::cleanup $utoken

	# Capture (href, text) of comic-page links only; hrefs are absolute
	# paths, so the bare host is used as the add_entry URL prefix.
	foreach {umatch uhref utext} [regexp -all -nocase -inline -- "<a href=\"(/comics/oots\[0-9\]+\.html)\">(\[^<\]+)</a>" $upage] {
		add_entry $dataname "http://www.giantitp.com" $uhref $utext
	}
}
|
|
157
|
|
158
|
|
### Fetch the RSS feeds (args: feed URL, display name, URL prefix for add_entry)
add_rss_feed "http://www.kaleva.fi/rss/145.xml" "Kaleva/Tiede" ""

#add_rss_feed "http://sektori.com/?tpl=rssNewsFeed" "Sektori" ""

add_rss_feed "http://www.effi.org/xml/uutiset.rss" "EFFI" ""

add_rss_feed "http://www.mtv3.fi/rss/uutiset_rikos.rss" "MTV3/Rikos" ""

add_rss_feed "http://www.blastwave-comic.com/rss/blastwave.xml" "Blastwave" ""

#add_rss_feed "http://lehti.samizdat.info/feed/" "Lehti" ""
|
|
##############################################################################
### Write all collected entries back to the datafile
# Write to a temp file first, then rename over the old datafile, so a failed
# run cannot leave a truncated datafile behind.
set tmpfname "$datafile.tmp"
if {[catch {set outfile [open $tmpfname w 0600]} uerrmsg]} {
	puts "Error opening $tmpfname for writing: $uerrmsg"
	return 1
}

# Entries older than one week would be expired, but the check below is
# deliberately commented out, so everything is kept. Braced expr so the
# expression is compiled once instead of re-parsed via double substitution.
set uexpire [expr {[clock seconds] - (7*24*60*60)}]
foreach {ukey udata} [array get entries] {
#	if {$isentries($ukey) != 0 || [lindex $udata 0] >= $uexpire} {
	puts $outfile [join $udata "½"]
#	}
}

close $outfile
if {[catch {file rename -force -- $tmpfname $datafile} uerrmsg]} {
	puts "Error renaming $tmpfname to $datafile: $uerrmsg"
}
#puts "$newurls new entries."
|