# HG changeset patch
# User Matti Hamalainen
# Date 1370436690 -10800
# Node ID c94b4e1a2ed414d6cebff67b13d23cd98f661e5a
# Parent  824d24f2082646e6d6d3f85367a9b62638e9e6a6
Rename some files, etc.

diff -r 824d24f20826 -r c94b4e1a2ed4 feeds.tcl
--- a/feeds.tcl	Wed Jun 05 15:49:47 2013 +0300
+++ b/feeds.tcl	Wed Jun 05 15:51:30 2013 +0300
@@ -6,6 +6,10 @@
 # Requires get_feeds.tcl to be run as a cronjob, for example
 # 15 * * * * /absolute/path/to/get_feeds.tcl
 #
+# See also create_feeds_db.tcl OR convert_feeds_db.tcl, as you will
+# need to either create a SQLite3 database or convert old text flat
+# file to SQLite3.
+#
 # This script is freely distributable under GNU GPL (version 2) license.
 #
 ##########################################################################
diff -r 824d24f20826 -r c94b4e1a2ed4 get_feeds.tcl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/get_feeds.tcl	Wed Jun 05 15:51:30 2013 +0300
@@ -0,0 +1,178 @@
+#!/usr/bin/tclsh
+#
+# NOTICE! Change above path to correct tclsh binary path!
+#
+##########################################################################
+#
+# FeedCheck fetcher v0.8 by Matti 'ccr' Hamalainen
+# (C) Copyright 2008-2013 Tecnic Software productions (TNSP)
+#
+# This script is freely distributable under GNU GPL (version 2) license.
+#
+##########################################################################
+package require sqlite3
+source [file dirname [info script]]/util_convert.tcl
+
+# SQLite3 database, MUST be set to same as in feeds.tcl
+set feeds_dbfile "/home/niinuska/bot/feeds.sqlite"
+
+# Use a HTTP proxy? 1 = yes, 0 = no
+set http_proxy 0
+
+# HTTP proxy address and port
+set http_proxy_host "cache.inet.fi"
+set http_proxy_port 800
+
+
+##########################################################################
+
+set html_ent [split "&nbsp;| |&raquo;|>>|&quot;|\"|&auml;|ä|&ouml;|ö|&Auml;|Ä|&Ouml;|Ö|&amp;|&|&lt;|<|&gt;|>|&#228;|ä|&#246;|ö|&#196;|Ä" "|"]
+
+package require http
+::http::config -urlencoding iso8859-1 -useragent "Mozilla/4.0 (compatible; MSIE 6.0; MSIE 5.5; Windows NT 5.0) Opera 9.5"
+if {$http_proxy != 0} {
+    ::http::config -proxyhost $http_proxy_host -proxyport $http_proxy_port
+}
+
+
+proc convert_ent {udata} {
+    global html_ent
+    return [string map $html_ent $udata]
+}
+
+
+proc add_entry {uname uprefix uurl utitle} {
+    global currclock feeds_db nitems
+    set utest "$uprefix[convert_ent $uurl]"
+
+    set usql "SELECT title FROM feeds WHERE url='[escape $utest]' AND feed='[escape $uname]'"
+    if {![feeds_db exists $usql]} {
+        set usql "INSERT INTO feeds (feed,utime,url,title) VALUES ('[escape $uname]', $currclock, '[escape $utest]', '[escape $utitle]')"
+        incr nitems
+        if {[catch {feeds_db eval $usql} uerrmsg]} {
+            puts "\nError: $uerrmsg on:\n$usql"
+            exit 15
+        }
+    }
+}
+
+
+proc add_rss_feed {datauri dataname dataprefix} {
+    if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
+        puts "Error getting $datauri: $uerrmsg"
+        return 1
+    }
+    set upage [::http::data $utoken]
+    ::http::cleanup $utoken
+
+    set umatches [regexp -all -nocase -inline -- "<item>.\*\?<title><..CDATA.(.\*\?)\\\]\\\]></title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
+    set nmatches [llength $umatches]
+    for {set n 0} {$n < $nmatches} {incr n 3} {
+        add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
+    }
+
+    if {$nmatches == 0} {
+        set umatches [regexp -all -nocase -inline -- "<item>.\*\?<title>(.\*\?)</title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
+        set nmatches [llength $umatches]
+        for {set n 0} {$n < $nmatches} {incr n 3} {
+            add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
+        }
+    }
+
+    if {$nmatches == 0} {
+        set umatches [regexp -all -nocase -inline -- "<item\[^>\]*>.\*\?<title>(.\*\?)</title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
+        set nmatches [llength $umatches]
+        for {set n 0} {$n < $nmatches} {incr n 3} {
+            add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
+        }
+    }
+
+    return 0
+}
+
+
+### Open database, etc
+set nitems 0
+set currclock [clock seconds]
+global feeds_db
+if {[catch {sqlite3 feeds_db $feeds_dbfile} uerrmsg]} {
+    puts "Could not open SQLite3 database '$feeds_dbfile': $uerrmsg."
+    exit 2
+}
+
+
+##############################################################################
+### Fetch and parse Halla-aho's blog page data
+set datauri "http://www.halla-aho.com/scripta/";
+set dataname "Mestari"
+if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
+    puts "Error getting $datauri: $uerrmsg"
+} else {
+    set upage [::http::data $utoken]
+    ::http::cleanup $utoken
+
+    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
+    set nmatches [llength $umatches]
+    for {set n 0} {$n < $nmatches} {incr n 3} {
+        add_entry $dataname $datauri [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
+    }
+
+    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]\[^b\]\[^<\]+)</a>" $upage]
+    set nmatches [llength $umatches]
+    for {set n 0} {$n < $nmatches} {incr n 3} {
+        add_entry $dataname $datauri [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
+    }
+}
+
+
+### The Adventurers
+set datauri "http://www.peldor.com/chapters/index_sidebar.html";
+set dataname "The Adventurers"
+if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
+    puts "Error getting $datauri: $uerrmsg"
+} else {
+    set upage [::http::data $utoken]
+    ::http::cleanup $utoken
+
+    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
+    set nmatches [llength $umatches]
+    for {set n 0} {$n < $nmatches} {incr n 3} {
+        add_entry $dataname "http://www.peldor.com/" [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
+    }
+}
+
+
+### Order of the Stick
+set datauri "http://www.giantitp.com/comics/oots.html";
+set dataname "OOTS"
+if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
+    puts "Error getting $datauri: $uerrmsg"
+} else {
+    set upage [::http::data $utoken]
+    ::http::cleanup $utoken
+
+    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
+    set nmatches [llength $umatches]
+    for {set n 0} {$n < $nmatches} {incr n 3} {
+        add_entry $dataname "http://www.giantitp.com" [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
+    }
+}
+
+
+### Generic RSS-feed fetching
+#add_rss_feed "http://www.kaleva.fi/rss/145.xml" "Kaleva/Tiede" ""
+
+add_rss_feed "http://www.effi.org/xml/uutiset.rss" "EFFI" ""
+
+add_rss_feed "http://static.mtv3.fi/rss/uutiset_rikos.rss" "MTV3/Rikos" ""
+
+add_rss_feed "http://www.blastwave-comic.com/rss/blastwave.xml" "Blastwave" ""
+
+#add_rss_feed "http://lehti.samizdat.info/feed/" "Lehti" ""
+
+
+
+### Close database
+feeds_db close
+
+puts "$nitems new items."
diff -r 824d24f20826 -r c94b4e1a2ed4 hae_feedit.tcl
--- a/hae_feedit.tcl	Wed Jun 05 15:49:47 2013 +0300
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,178 +0,0 @@
-#!/usr/bin/tclsh
-#
-# NOTICE! Change above path to correct tclsh binary path!
-#
-##########################################################################
-#
-# FeedCheck fetcher v0.8 by Matti 'ccr' Hamalainen
-# (C) Copyright 2008-2013 Tecnic Software productions (TNSP)
-#
-# This script is freely distributable under GNU GPL (version 2) license.
-#
-##########################################################################
-package require sqlite3
-source [file dirname [info script]]/util_convert.tcl
-
-# SQLite3 database, MUST be set to same as in feeds.tcl
-set feeds_dbfile "/home/niinuska/bot/feeds.sqlite"
-
-# Use a HTTP proxy? 1 = yes, 0 = no
-set http_proxy 0
-
-# HTTP proxy address and port
-set http_proxy_host "cache.inet.fi"
-set http_proxy_port 800
-
-
-##########################################################################
-
-set html_ent [split "&nbsp;| |&raquo;|>>|&quot;|\"|&auml;|ä|&ouml;|ö|&Auml;|Ä|&Ouml;|Ö|&amp;|&|&lt;|<|&gt;|>|&#228;|ä|&#246;|ö|&#196;|Ä" "|"]
-
-package require http
-::http::config -urlencoding iso8859-1 -useragent "Mozilla/4.0 (compatible; MSIE 6.0; MSIE 5.5; Windows NT 5.0) Opera 9.5"
-if {$http_proxy != 0} {
-    ::http::config -proxyhost $http_proxy_host -proxyport $http_proxy_port
-}
-
-
-proc convert_ent {udata} {
-    global html_ent
-    return [string map $html_ent $udata]
-}
-
-
-proc add_entry {uname uprefix uurl utitle} {
-    global currclock feeds_db nitems
-    set utest "$uprefix[convert_ent $uurl]"
-
-    set usql "SELECT title FROM feeds WHERE url='[escape $utest]' AND feed='[escape $uname]'"
-    if {![feeds_db exists $usql]} {
-        set usql "INSERT INTO feeds (feed,utime,url,title) VALUES ('[escape $uname]', $currclock, '[escape $utest]', '[escape $utitle]')"
-        incr nitems
-        if {[catch {feeds_db eval $usql} uerrmsg]} {
-            puts "\nError: $uerrmsg on:\n$usql"
-            exit 15
-        }
-    }
-}
-
-
-proc add_rss_feed {datauri dataname dataprefix} {
-    if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
-        puts "Error getting $datauri: $uerrmsg"
-        return 1
-    }
-    set upage [::http::data $utoken]
-    ::http::cleanup $utoken
-
-    set umatches [regexp -all -nocase -inline -- "<item>.\*\?<title><..CDATA.(.\*\?)\\\]\\\]></title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
-    set nmatches [llength $umatches]
-    for {set n 0} {$n < $nmatches} {incr n 3} {
-        add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
-    }
-
-    if {$nmatches == 0} {
-        set umatches [regexp -all -nocase -inline -- "<item>.\*\?<title>(.\*\?)</title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
-        set nmatches [llength $umatches]
-        for {set n 0} {$n < $nmatches} {incr n 3} {
-            add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
-        }
-    }
-
-    if {$nmatches == 0} {
-        set umatches [regexp -all -nocase -inline -- "<item\[^>\]*>.\*\?<title>(.\*\?)</title>.\*\?<link>(http.\*\?)</link>.\*\?</item>" $upage]
-        set nmatches [llength $umatches]
-        for {set n 0} {$n < $nmatches} {incr n 3} {
-            add_entry $dataname $dataprefix [lindex $umatches [expr $n+2]] [lindex $umatches [expr $n+1]]
-        }
-    }
-
-    return 0
-}
-
-
-### Open database, etc
-set nitems 0
-set currclock [clock seconds]
-global feeds_db
-if {[catch {sqlite3 feeds_db $feeds_dbfile} uerrmsg]} {
-    puts "Could not open SQLite3 database '$feeds_dbfile': $uerrmsg."
-    exit 2
-}
-
-
-##############################################################################
-### Fetch and parse Halla-aho's blog page data
-set datauri "http://www.halla-aho.com/scripta/";
-set dataname "Mestari"
-if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
-    puts "Error getting $datauri: $uerrmsg"
-} else {
-    set upage [::http::data $utoken]
-    ::http::cleanup $utoken
-
-    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
-    set nmatches [llength $umatches]
-    for {set n 0} {$n < $nmatches} {incr n 3} {
-        add_entry $dataname $datauri [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
-    }
-
-    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]\[^b\]\[^<\]+)</a>" $upage]
-    set nmatches [llength $umatches]
-    for {set n 0} {$n < $nmatches} {incr n 3} {
-        add_entry $dataname $datauri [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
-    }
-}
-
-
-### The Adventurers
-set datauri "http://www.peldor.com/chapters/index_sidebar.html";
-set dataname "The Adventurers"
-if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
-    puts "Error getting $datauri: $uerrmsg"
-} else {
-    set upage [::http::data $utoken]
-    ::http::cleanup $utoken
-
-    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
-    set nmatches [llength $umatches]
-    for {set n 0} {$n < $nmatches} {incr n 3} {
-        add_entry $dataname "http://www.peldor.com/" [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
-    }
-}
-
-
-### Order of the Stick
-set datauri "http://www.giantitp.com/comics/oots.html";
-set dataname "OOTS"
-if {[catch {set utoken [::http::geturl $datauri -binary true -timeout 5000]} uerrmsg]} {
-    puts "Error getting $datauri: $uerrmsg"
-} else {
-    set upage [::http::data $utoken]
-    ::http::cleanup $utoken
-
-    set umatches [regexp -all -nocase -inline -- "<a href=\"(\[^\"\]+)\">(\[^<\]+)</a>" $upage]
-    set nmatches [llength $umatches]
-    for {set n 0} {$n < $nmatches} {incr n 3} {
-        add_entry $dataname "http://www.giantitp.com" [lindex $umatches [expr $n+1]] [lindex $umatches [expr $n+2]]
-    }
-}
-
-
-### Generic RSS-feed fetching
-#add_rss_feed "http://www.kaleva.fi/rss/145.xml" "Kaleva/Tiede" ""
-
-add_rss_feed "http://www.effi.org/xml/uutiset.rss" "EFFI" ""
-
-add_rss_feed "http://static.mtv3.fi/rss/uutiset_rikos.rss" "MTV3/Rikos" ""
-
-add_rss_feed "http://www.blastwave-comic.com/rss/blastwave.xml" "Blastwave" ""
-
-#add_rss_feed "http://lehti.samizdat.info/feed/" "Lehti" ""
-
-
-
-### Close database
-feeds_db close
-
-puts "$nitems new items."
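
Note on database setup: the new comment block in feeds.tcl above points to
create_feeds_db.tcl and convert_feeds_db.tcl, neither of which is included in
this changeset. The following is a minimal sketch of what the creation step
presumably has to do. It is a guess, not the actual create_feeds_db.tcl: the
table name and columns are inferred from the
"INSERT INTO feeds (feed,utime,url,title)" statement in add_entry, and the
database path is the one hardcoded in get_feeds.tcl.

#!/usr/bin/tclsh
#
# Hypothetical sketch of the database creation step -- inferred from
# get_feeds.tcl, not taken from the repository.
package require sqlite3

# Must match feeds_dbfile in feeds.tcl and get_feeds.tcl.
set feeds_dbfile "/home/niinuska/bot/feeds.sqlite"

if {[catch {sqlite3 feeds_db $feeds_dbfile} uerrmsg]} {
    puts "Could not create SQLite3 database '$feeds_dbfile': $uerrmsg."
    exit 1
}

# One row per feed item ever seen; add_entry in get_feeds.tcl checks for
# an existing (feed, url) pair before inserting, so no UNIQUE constraint
# is strictly required.
feeds_db eval {
    CREATE TABLE IF NOT EXISTS feeds (
        feed  TEXT NOT NULL,
        utime INTEGER NOT NULL,
        url   TEXT NOT NULL,
        title TEXT
    )
}

feeds_db close

With a table like this in place, the cron entry quoted in feeds.tcl
("15 * * * * /absolute/path/to/get_feeds.tcl") can run against an empty
database; existing installations would instead use convert_feeds_db.tcl to
migrate the old flat-file data to SQLite3.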