slow solution to check the presence of many URLs from a file

master
Fabien Benetou 14 years ago
parent 8e6c379702
commit 598a903aac
1 changed file with 8 changed lines:
    shell_scripts/browser_queries

@@ -43,7 +43,13 @@ echo -e "\nURLs visited between starting URL and ending URL"
 # as first done with http://fabien.benetou.fr/Events/DrumbeatParis#VisitedLinks
-URLSLISTFILE=~/urlstocheck
+URLSLISTFILE=~/.urlstocheck
+SCRIPTURL=`grep '^\$ScriptUrl' ~/www/mirrors/fabien/local/config.php | grep -v 127 | sed -e "s/.*'\(.*\)';/\1/"`
+echo "" > $URLSLISTFILE
+ls ~/www/mirrors/fabien/wiki.d/ | grep -v "del-" | tr "." "/" | sed -e "s,^,$SCRIPTURL\/," >> $URLSLISTFILE
+while read line; do
+sqlite3 -column $DB "SELECT count(*),url FROM moz_historyvisits, moz_places WHERE moz_historyvisits.place_id = moz_places.id and url LIKE '$line'" | grep "0" && echo -e "\t$line"
+done < $URLSLISTFILE
 echo -e "\nwhich URLs from this list have never been visited this week"
 echo -e "\nwhich URLs from this list have never been visited in general"
 # e.g. all wiki pages, all page from a wiki group (e.g. memory recall)
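
For reference, a minimal standalone sketch of the check this hunk adds: it reads a URL list one line at a time and prints the URLs that have no recorded visit in Firefox's history. The places database path and URL list path below are assumptions for illustration ($DB is defined earlier in the actual script), and the zero-visit test uses a numeric comparison instead of grep:

DB=~/places.sqlite            # assumed copy of Firefox's places.sqlite
URLSLISTFILE=~/urlstocheck    # assumed list of URLs, one per line

while read -r url; do
	# skip blank lines
	[ -z "$url" ] && continue
	# count recorded visits for this URL in the Firefox history tables
	visits=$(sqlite3 "$DB" "SELECT count(*) FROM moz_historyvisits, moz_places WHERE moz_historyvisits.place_id = moz_places.id AND url LIKE '$url'")
	# report URLs that have never been visited
	[ "$visits" -eq 0 ] && echo -e "\t$url"
done < "$URLSLISTFILE"

Note that Firefox usually keeps places.sqlite locked while it is running, so pointing the query at a copy of the file tends to be more reliable.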
