added personal scripts

master
Fabien Benetou 14 years ago
parent 8a3d5f4c40
commit d37fcfdce1
  1. shell_scripts/con_restart (+3)
  2. shell_scripts/downloads_to_watch_later (+3)
  3. shell_scripts/export_shared_links_per_person (+3)
  4. shell_scripts/gallery_making (+4)
  5. shell_scripts/gm_reverting_irc_logs_links (+45)
  6. shell_scripts/gm_reverting_pim_links (+47)
  7. shell_scripts/last_tweeters_account_on_logname (+2)
  8. shell_scripts/most_used_commands (+14)
  9. shell_scripts/mytasks (+15)
  10. shell_scripts/pmwiki (+20)
  11. shell_scripts/recently_modified_etc_configs (+2)
  12. shell_scripts/social_behavior_distribution (+20)
  13. shell_scripts/social_grep (+14)
  14. shell_scripts/tunkranker (+3)
  15. shell_scripts/videolectures_rtmpdump_warper (+25)
  16. shell_scripts/wiki_average_words_per_diff (+5)
  17. shell_scripts/wiki_contribution_distribution (+9)
  18. shell_scripts/wiki_diff_distribution (+13)
  19. shell_scripts/wiki_link_checking (+19)
  20. shell_scripts/wiki_list_content (+7)
  21. shell_scripts/wiki_oldest_mentions (+22)
  22. shell_scripts/wiki_page_diffs (+4)
  23. shell_scripts/wiki_page_diffs_visualization (+18)
  24. shell_scripts/wiki_per_page_diffs_visualization (+13)
  25. shell_scripts/wiki_recent_untrusted_edits (+14)
  26. shell_scripts/wiki_semantic_categories (+10)
  27. shell_scripts/wiki_total_diffs (+5)
  28. shell_scripts/wiki_untrusted_edits (+16)

shell_scripts/con_restart
@@ -0,0 +1,3 @@
#!/bin/sh
CON=`nmcli con list | grep wireless | sed "s/ .*//"`
nmcli con down id $CON && nmcli con up id $CON
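
A more defensive variant of the restart above (sketch): if several wireless connections are listed, the backtick capture passes all of their names to nmcli, so keep only the first match and quote it.
# hypothetical variant: restart only the first wireless connection found
CON=$(nmcli con list | grep wireless | head -1 | sed "s/ .*//")
nmcli con down id "$CON" && nmcli con up id "$CON"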

shell_scripts/downloads_to_watch_later
@@ -0,0 +1,3 @@
#!/bin/sh
RATE=50k
wget -c --limit-rate=$RATE -i ~/to_download_and_watch

shell_scripts/export_shared_links_per_person
@@ -0,0 +1,3 @@
#!/bin/sh
# if no $1 then ask for person name
grep $1 ~/sharedlinks | sed "s/$1 //" | sed "$ s/^/<html><table><tr><td>/" | tac | sed "$ s/$/<\/table><\/html>/" | sed "s/^/<tr><td>/" | sed "s/^\(.*\) +0200 \(.*\)/\1<\/td><td>\2/" | sed "s/http\([^ ]*\)/<a href=\"http\1\">http\1<\/a>/g" | sed "s/$/<\/td><\/tr>/" > ~/web/benetou.fr/fabien/pub/sharedlinks/$1.html

shell_scripts/gallery_making
@@ -0,0 +1,4 @@
#!/bin/sh
# consider using convert to generate thumbnails and auto-rotate
echo "<html>" > gallery.html && ls *.jpg *.JPG | sort -n | sed "s/\(.*\)/<a href=\"\1\"><img height=\"200px\" src=\"\1\"\/>\1<\/a><br\/>/" >> gallery.html && echo "</html>" >> gallery.html
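
The comment above suggests using convert for thumbnails and auto-rotation; a minimal ImageMagick sketch of that step (thumbnail size and naming are assumptions):
# hypothetical thumbnail pass: auto-orient each photo and write a 200px-high thumbnail next to it
for IMG in *.jpg *.JPG; do convert "$IMG" -auto-orient -thumbnail x200 "thumb_$IMG"; done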

shell_scripts/gm_reverting_irc_logs_links
@@ -0,0 +1,45 @@
#!/bin/sh
# http://fabien.benetou.fr/Tools/Greasemonkey#RevertedPIMLinks
# TODO
# properly declare
# path
# groups to avoid
# pages to avoid
DATE=$(date +%s)
#cd /home/utopiah/web/benetou.fr/fabien/link_extractor
USERS=$(ls */*.log | grep -v twitter | grep -v identica | grep -v '#' | grep -v http)
# '#' removes channels and 'http' removes mistaken entries that would break the rest of the script
echo 'extract all the links from the logs'
# skipped here since the logs are small enough not to be pre-processed (rendered when it's a wiki)
echo 'get all the links > sorted_global.txt'
grep http $USERS | grep -v "<Utopiah>" | sed "s/http/\nhttp/g" | grep http | sed "s/ .*//" | grep -e "http://\w\|https://\w" | sort | uniq > sorted_global.txt
# keep only tokens that look like real URLs (same pattern as gm_reverting_pim_links) so that a bare "http" no longer slips through
echo 'for every link check in which page it is mentioned and append it without duplicates > indexed_links_uniqued'
echo '' > indexed_links_uniqued
while read line; do
echo -n "$line " >> indexed_links_uniqued
grep -i $line $USERS | sed "s/:.*//" | sort | uniq | xargs -0 echo "Discussion:" | sed "s/ //" >> indexed_links_uniqued
done < sorted_global.txt
echo 'clean out improper URLs (e.g. " present) rather than escaping them with sed "s/\"/\\\"/g"'
grep -v '"' indexed_links_uniqued | sort | uniq > indexed_links_uniqued_cleaned
echo 'format as User.js and make it available'
cat indexed_links_uniqued_cleaned | grep http | sed 's/\([^ ]\+\) \(.*\)/user_pref("greasemonkey.scriptvals.Utopiah\/reverted PIM links.rPIMlinks \1", "\2");/' > user.js
echo "user_pref(\"greasemonkey.scriptvals.Utopiah/reverted PIM links.rPIMlinks IRCdate\", \"$DATE\");" >> user.js
echo 'compress for faster transfer'
bzip2 -k -f user.js #compress by a factor 10
echo 'make the script available via http://cloud.benetou.fr/discussions/user.js.bz2'
#mv user.js.bz2 ../pub/
echo '(note that since this is not merged with the existing user.js it will require another restart)'
#echo 'periodically call this very script'
#server cron added
#client cron not added
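
The closing comments mention a periodic server-side cron call without showing it; a minimal crontab sketch (the schedule, install path and working directory are assumptions):
# hypothetical crontab entry: rebuild the reverted-links user.js every night at 04:00
0 4 * * * cd /home/utopiah/irclogs && /home/utopiah/bin/gm_reverting_irc_logs_links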

shell_scripts/gm_reverting_pim_links
@@ -0,0 +1,47 @@
#!/bin/sh
# http://fabien.benetou.fr/Tools/Greasemonkey#RevertedPIMLinks
# TODO
# properly declare
# path
# groups to avoid
# pages to avoid
WIKI=/home/utopiah/web/benetou.fr/fabien/
DATE=$(date +%s)
cd /home/utopiah/web/benetou.fr/fabien/link_extractor
echo 'extract all the links from the wiki by group'
for GROUP in $(ls ../wiki.d/ | sed "s/\..*//" | sort | uniq | grep -v PmWiki | grep -v Site );
do
for PAGE in $(ls ../wiki.d/$GROUP.* | sed "s/\.\.\/wiki.d\///" ); do pmwiki n=$PAGE nolog=true | sed "s/http/\nhttp/g" | grep http | grep -v benetou.fr | grep -v seedea.org | grep -v 127.0.0.1 | grep -v .ico\" | sed "s/'.*//" | sed "s/<\/a>.*//" | sed "s/$/ $PAGE/"; done > links_from_$GROUP
done
echo 'get all the links > sorted_global.txt'
cat links_from_* | sed "s/ .*//" | sort | uniq | grep -e "http://\w\|https://\w" > sorted_global.txt
echo 'for every link check in which page it is mentioned and append it without duplicates > indexed_links_uniqued'
echo '' > indexed_links_uniqued
while read line; do
echo -n "$line " >> indexed_links_uniqued
grep -i $line links_from_* | sed "s/.* //" | sort | uniq | xargs >> indexed_links_uniqued
done < sorted_global.txt
echo 'clean out improper URLs (e.g. " present) rather than escaping them with sed "s/\"/\\\"/g"'
grep -v '"' indexed_links_uniqued > indexed_links_uniqued_cleaned
echo 'format as User.js and make it available'
cat indexed_links_uniqued_cleaned | sed 's/\([^ ]\+\) \(.*\)/user_pref("greasemonkey.scriptvals.Utopiah\/reverted PIM links.rPIMlinks \1", "\2");/' > user.js
echo "user_pref(\"greasemonkey.scriptvals.Utopiah/reverted PIM links.rPIMlinks date\", \"$DATE\");" >> user.js
# replaced by rsync
#echo 'compress for faster transfer'
#bzip2 -k -f user.js #compress by a factor 10
# replaced by rsync
# echo 'make the script available via http://fabien.benetou.fr/pub/user.js.bz2'
#mv user.js.bz2 ../pub/
#echo 'periodically call this very script'
#server cron added
#client cron not added
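
The comments note that compression and HTTP publication were replaced by rsync; a hedged sketch of what the client-side sync might look like (everything after the host, including the Firefox profile path, is an assumption):
# hypothetical client-side sync of the generated user.js into the local Firefox profile
rsync -az utopiah@benetou.fr:web/benetou.fr/fabien/link_extractor/user.js /path/to/firefox/profile/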

shell_scripts/last_tweeters_account_on_logname
@@ -0,0 +1,2 @@
#!/bin/sh
curl "http://search.twitter.com/search?q=$LOGNAME" | grep profile | sed "s/.*com\/\(.*\)\" onclick.*/http:\/\/twitter.com\/\1/" | grep -v $LOGNAME | sort | uniq

shell_scripts/most_used_commands
@@ -0,0 +1,14 @@
#!/bin/sh
AVOID="con\|utopiah@benetou.fr"
echo consider also http://fabien.benetou.fr/Tools/ and exercises
# should also remove arguments without - e.g. nmcli con or ssh myserver
cat ~/.bash_history | sed "s/ /\n/g" | grep "^[[:alpha:]]" | sort | uniq -c | grep -v $AVOID | sort -n | tail | sed "s, \([a-zA-Z].*\), \1\t( http://unixhelp.ed.ac.uk/CGI/man-cgi?\1 ) ,"
# simpler version for just the first word, which is bad for commands like sort which never appear first
#cat ~/.bash_history | sed "s/ .*//" | sort | uniq -c | sort -n | tail
# consider similar usage for more than bash
# find ~ -name "*history*"
# ~/.vimperator/info/default/history-command (specific JSON format)
# ~/.newsbeuter/history.cmdline
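
Following the closing comments, a sketch of the same counting applied to other plain-text history files (the vimperator JSON history would need its own parsing; the file list and tail length are assumptions):
# hypothetical generalization: top commands per history file
for H in ~/.bash_history ~/.newsbeuter/history.cmdline
do
echo "== $H =="
sed "s/ /\n/g" "$H" | grep "^[[:alpha:]]" | sort | uniq -c | sort -n | tail -5
done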

shell_scripts/mytasks
@@ -0,0 +1,15 @@
#!/bin/sh
# learn more about cron, at and batch
## note that at also requires specifying the DISPLAY
## e.g. using http://www.ibm.com/developerworks/linux/library/l-job-scheduling/index.html
# consider hooks and events e.g. inofity, git hooks, ratpoison hooks, ...
QUESTION='read -p "finished? (^C to stop, make sure to delete previous done tasks) "'
#echo merge *.pmwiki to the wiki && `$QUESTION`
mottt_through_proxy &
#evince -p 47 book_or_article.pdf; $QUESTION
## note that evince remembers the last opened page
## re-opening the document updates the page number in the running version
# script newsbeuter to handle evince
#echo watch videos in `ls ~/*.flv`; `$QUESTION`

shell_scripts/pmwiki
@@ -0,0 +1,20 @@
#!/usr/bin/php -q
<?php
//to query the local PmWiki via the CLI
if (!chdir("/home/utopiah/web/benetou.fr/fabien/")) // PHP does not expand ~, so use the absolute path
die("chdir failed.");
if ($argc > 0)
{
for ($i=1;$i < $argc;$i++)
{
parse_str($argv[$i],$tmp);
$_REQUEST = array_merge($_REQUEST, $tmp);
}
}
require_once '/home/utopiah/web/benetou.fr/fabien/pmwiki.php';
?>
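
Usage example for this CLI wrapper, matching how gm_reverting_pim_links calls it above (the page name is illustrative and the script is assumed to be on the PATH, e.g. in /home/utopiah/bin):
# render a single page from the command line, skipping the access log
pmwiki n=Main.HomePage nolog=true > rendered.html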

shell_scripts/recently_modified_etc_configs
@@ -0,0 +1,2 @@
#!/bin/sh
find /etc/ -mtime -1 -name "*.conf"

shell_scripts/social_behavior_distribution
@@ -0,0 +1,20 @@
#!/bin/sh
# http://fabien.benetou.fr/Tools/Irssi#LogsSocialBehaviors
if [ $# -lt 1 ]
then
echo "usage: $0 person [behavior_pattern]" ; exit;
fi
#save params in ~/$0_history
# history | grep social_behavior_distribution
## cheaper version
PERSON=$1;
BEHAVIOR_PATTERN=$2;
FRESHNESS=-20000;
tail $FRESHNESS ~/irclogs/seedeabitlbee/$PERSON.log | grep -i "$BEHAVIOR_PATTERN" | grep -i $PERSON | sed "s/:.*//" | sort -n | uniq -c | sort -n -r | head -3
# note that seedeabitlbee is now fabien
# note that comparison can't be done because freshness changes per person
# i.e. the more you talk with a person, the fresher the information will be, and vice versa
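
A usage example matching the positional arguments the script actually reads (nickname and pattern are illustrative):
# who most often produces a given pattern in the conversations logged with this person
social_behavior_distribution somenick ":)"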

shell_scripts/social_grep
@@ -0,0 +1,14 @@
#!/bin/sh
# http://fabien.benetou.fr/Tools/Shell#Scripting
# shell prototype
# if 0 args display help
# $#==0 echo "usage: $0 pattern [network] [user]" ; exit; ?
grep $@ ~/irclogs/*/*
#$2 for server limitations
#$3 for user limitations
#save params in ~/$0_history
echo $(date +%s) $@ >> ~/SocialMemorization_history
# history | grep SocialMemorization.sh
## cheaper version

shell_scripts/tunkranker
@@ -0,0 +1,3 @@
#!/bin/sh
# generalize via a parameter, default back to USER
curl http://tunkrank.com/refresh/utopiah.json && curl http://tunkrank.com/score/utopiah.json >> ~/web/benetou.fr/fabien/pub/socialbehaviors/tunkrank_utopiah.txt && echo "" >> ~/web/benetou.fr/fabien/pub/socialbehaviors/tunkrank_utopiah.txt
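
The comment asks to generalize via a parameter defaulting back to USER; a minimal sketch of that change (only the account variable is new, the rest mirrors the line above):
ACCOUNT=${1:-$USER}
OUT=~/web/benetou.fr/fabien/pub/socialbehaviors/tunkrank_$ACCOUNT.txt
curl http://tunkrank.com/refresh/$ACCOUNT.json && curl http://tunkrank.com/score/$ACCOUNT.json >> $OUT && echo "" >> $OUT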

shell_scripts/videolectures_rtmpdump_warper
@@ -0,0 +1,25 @@
#!/bin/sh
# http://fabien.benetou.fr/Tools/Tools#RTMPDump
if [ $# -lt 1 ]
then
echo "VideoLectures helper for RTMPDump"
echo "usage: $0 http://VideoLecture.net/targetlecture/ [targetlecture.flv]"
echo "if the second parameter is omitted the name of the lecture from the URL will be used"
echo ""
echo "Want to grab a list of lectures? Consider xargs (e.g. xargs -n 1 -a list_of_lectures.txt videolectures.sh )."
echo ""
echo "(note that resuming does not work, nor does keyframe skipping)"
exit
fi
URL=$1
DEST=$2
# if $2 is empty, use the last part of the URI
if [ $# -lt 2 ]
then
DEST=$(echo $1 | sed "s|http://videolectures.net/\(.*\)/|\1|").flv
fi
REMOTE=$(curl $1 | grep clip.url | sed "s/.*\"\(.*\)\".*/\1/" | grep -v flv)
SOURCE=rtmp://oxy.videolectures.net/video
rtmpdump -W $1 -o $DEST -r $SOURCE -y $REMOTE

shell_scripts/wiki_average_words_per_diff
@@ -0,0 +1,5 @@
#!/bin/sh
# takes as argument the wiki root
grep diff: $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site.) | wc | awk '{print $2/$1}' | bc

shell_scripts/wiki_contribution_distribution
@@ -0,0 +1,9 @@
#!/bin/sh
#if not in wiki.d/ via pwd or parameter
##then display help
# `pwd|grep wiki.d`
## XXX this and the following scripts are not run from the root of the wiki
### wiki_oldest_mentions
### wiki_page_diffs
grep author: $@ | sed -e "s/.*=//" | sort | uniq -i -c | sort -nr | head

shell_scripts/wiki_diff_distribution
@@ -0,0 +1,13 @@
#!/bin/sh
# to use in wiki root
## XXX this is not consistent with some other commands
### e.g. wiki_contribution_distribution
grep diff: $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site.) > alllines
echo "" > diff_distribution
while read line; do
echo "$line " | wc -w >> diff_distribution
done < alllines
sort -n diff_distribution | uniq -c | sort -r -n

shell_scripts/wiki_link_checking
@@ -0,0 +1,19 @@
#!/bin/sh
# http://fabien.benetou.fr/Wiki/ToDo#Maintenance
# TODO
# generalize for other URLs
if [ $# -lt 1 ]
then
echo "Check page per page against dead links for PmWiki"
echo "usage: $0 PmWiki_path"
exit
fi
WIKIPATH=$1 # renamed from PATH so the shell search path is not clobbered
NOW_EPOCH=$(date +"%s")
URL=http://fabien.benetou.fr/
for PAGE in $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site | sed "s/.*wiki\.d\///" | tr "." "/")
do
checklink -s $URL/$PAGE > $WIKIPATH/pub/checking/$(echo $PAGE|tr "/" ".").check
# using the Perl W3C-checklink
## should fail if not installed and suggest the CPAN command
done;
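
The comment above suggests failing when checklink is not installed and pointing at the CPAN command; a possible guard (the CPAN module name is given from memory and should be double-checked):
# hypothetical guard, to be placed near the top of the script
command -v checklink >/dev/null || { echo "checklink not found, install it e.g. with: cpan W3C::LinkChecker"; exit 1; }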

shell_scripts/wiki_list_content
@@ -0,0 +1,7 @@
#!/bin/sh
#to be done in the root of the wiki
for X in $(ls $1/wiki.d);
do
echo $X | grep -v -E ".flock|,new|,del|RecentChanges|PmWiki.|Site."
done

shell_scripts/wiki_oldest_mentions
@@ -0,0 +1,22 @@
#!/bin/sh
# http://fabien.benetou.fr/MemoryRecalls/ImprovingPIM
# TODO
# discard multiple edits on the same page
# put the number of results as a parameter (or just let the user do so?)
# cache the result as it should not change over time (except if pages are moved)
if [ $# -lt 1 ]
then
echo "Locating the first 10 edits of a word for PmWiki"
echo "usage: $0 word [PmWiki/wiki.d/path]"
echo "if the second parameter is omitted the path is assumed to be ."
exit
fi
WORD=$1
DEST=$2
NUMBEROFRESULT=10
# if $2 is empty, default to the current directory
if [ $# -lt 2 ]
then
DEST=.
fi
grep -i $WORD $DEST/* | sed "s/=.*//" | grep diff | sort -n -t: -k3 -r | tail -$NUMBEROFRESULT
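
Per the TODO, the number of results could become an optional third parameter; a one-line sketch replacing the hard-coded value above:
NUMBEROFRESULT=${3:-10}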

shell_scripts/wiki_page_diffs
@@ -0,0 +1,4 @@
#!/bin/sh
## to be done in wiki.d/
grep diff: $1 | awk -F: '{print $2}' | sort -n | uniq

shell_scripts/wiki_page_diffs_visualization
@@ -0,0 +1,18 @@
#!/bin/sh
# deprecated in favour of Processing visualizations http://fabien.benetou.fr/Wiki/Visualization#timeline
if [ "$#" -ne 1 ]; then
echo 'Generate GNUPlot visualization for the PmWiki directory'
echo "Usage: $0 path_to_PmWiki_directory"
exit 1
fi
NOW_EPOCH=$(date +"%s")
for PAGE in $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site | sed "s/.*wiki\.d\///")
do
wiki_page_diffs $1/wiki.d/$PAGE > $1/pub/visualization/edits_per_page/$PAGE.data;
echo -e "set terminal png\nset output '$PAGE.png'\nset notitle\nset nokey\nset xlabel ''\nset format x ''\nset format y ''\nset ylabel ''\nset timefmt '%s'\nset size 1,0.05\nplot [1214268628:$NOW_EPOCH] [0:] '$PAGE.data' using 1:0\n" > $1/pub/visualization/edits_per_page/$PAGE.plt;
done;
for PAGE in $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site | sed "s/.*wiki\.d\///")
do
(cd $1/pub/visualization/edits_per_page/; gnuplot $PAGE.plt;);
done

shell_scripts/wiki_per_page_diffs_visualization
@@ -0,0 +1,13 @@
#!/bin/sh
# deprecated in favour of Processing visualizations http://fabien.benetou.fr/Wiki/Visualization#timeline
NOW_EPOCH=$(date +"%s")
WIKI_PATH=/home/utopiah/web/benetou.fr/fabien
sleep 1
for PAGE in $(echo $1 | grep -v -E "flock|pageindex|RecentChanges" | sed "s/,new//");
do
/home/utopiah/bin/wiki_page_diffs $WIKI_PATH/wiki.d/$PAGE > $WIKI_PATH/pub/visualization/edits_per_page/$PAGE.data;
echo -e "set terminal png\nset output '$PAGE.png'\nset notitle\nset nokey\nset xlabel ''\nset format x ''\nset format y ''\nset ylabel ''\nset timefmt '%s'\nset size 1,0.05\nplot [1214268628:$NOW_EPOCH] [0:] '$PAGE.data' using 1:0\n" > $WIKI_PATH/pub/visualization/edits_per_page/$PAGE.plt;
cd $WIKI_PATH/pub/visualization/edits_per_page/; gnuplot $PAGE.plt;
done

shell_scripts/wiki_recent_untrusted_edits
@@ -0,0 +1,14 @@
#!/bin/sh
# TODO
# fix untrusted_edits path problem first
# note that the output format changed too
EXP_ARGS=1
if [ "$1" = "--help" -o "$1" = "-h" -o $# -lt $EXP_ARGS ]
then
echo "Search for the latest untrusted edits in PmWiki"
echo "Usage: $0 PmWikiPath"
echo "(uses untrusted_edits)"
exit
fi
THA=$(($(date +"%s") - (60 * 60 * 24 * 2) ));
grep time= $( wiki_untrusted_edits "$1" ) | grep -v ctime | sed -e "s/$/=$THA/" | awk 'BEGIN{FS="="} {if ($2 > $3) print $0}' | sed "s/:.*//"

shell_scripts/wiki_semantic_categories
@@ -0,0 +1,10 @@
#!/bin/sh
# to execute in wiki.d/
APIKEY=`cat ~/.semantichacker.com_apikey`
for PAGE in `echo $1 | sed "s/\./\//"`;
do
sleep 5
echo "http://api.semantichacker.com/$APIKEY/category?showLabels=true&uri=http://fabien.benetou.fr/$PAGE"
done

shell_scripts/wiki_total_diffs
@@ -0,0 +1,5 @@
#!/bin/sh
# takes as argument the wiki root
grep diff: $(ls $1/wiki.d/* | grep -v PmWiki. | grep -v Site.) | awk '{FS=":"; print $3}' | wc -l

shell_scripts/wiki_untrusted_edits
@@ -0,0 +1,16 @@
#!/bin/sh
# TODO
# BUG! works with . but not with full path
EXP_ARGS=1
LIST=~/.wiki_trusted_authors
if [ "$1" = "--help" -o "$1" = "-h" -o $# -lt $EXP_ARGS ]
then
echo "Search for untrusted edits in PmWiki"
echo "Usage: $0 PmWikiPath"
echo "(note that the list of trusted authors is in $LIST)"
exit
fi
grep author: $1/* | grep -v diff: | grep -v -f $LIST
# seems to be more interesting to delegate that to the user
#| sed -e "s/:.*//" | sort | uniq
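
The TODO flags that the script works with . but not with a full path; the other wiki_* scripts in this commit grep under $1/wiki.d/, so one hedged guess at a fix is to accept either form:
# hypothetical fix: accept either a wiki root or a wiki.d directory
if [ -d "$1/wiki.d" ]; then DIR=$1/wiki.d; else DIR=$1; fi
grep author: $DIR/* | grep -v diff: | grep -v -f $LIST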