commit d2ff51546f: initial vomit

Makefile (new file, 108 lines)
@@ -0,0 +1,108 @@
PY=python3
PELICAN=pelican
PELICANOPTS=

BASEDIR=$(CURDIR)
INPUTDIR=$(BASEDIR)/content
OUTPUTDIR=$(BASEDIR)/output
CONFFILE=$(BASEDIR)/pelicanconf.py
PUBLISHCONF=$(BASEDIR)/publishconf.py

FTP_HOST=syntax-fehler.de
FTP_USER=syntax-fehler
FTP_TARGET_DIR=httpdocs

SSH_HOST=localhost
SSH_PORT=22
SSH_USER=root
SSH_TARGET_DIR=/var/www

S3_BUCKET=my_s3_bucket

CLOUDFILES_USERNAME=my_rackspace_username
CLOUDFILES_API_KEY=my_rackspace_api_key
CLOUDFILES_CONTAINER=my_cloudfiles_container

DROPBOX_DIR=~/Dropbox/Public/

DEBUG ?= 0
ifeq ($(DEBUG), 1)
	PELICANOPTS += -D
endif

help:
	@echo 'Makefile for a pelican Web site'
	@echo ''
	@echo 'Usage:'
	@echo '   make html                    (re)generate the web site'
	@echo '   make clean                   remove the generated files'
	@echo '   make regenerate              regenerate files upon modification'
	@echo '   make publish                 generate using production settings'
	@echo '   make serve [PORT=8000]       serve site at http://localhost:8000'
	@echo '   make devserver [PORT=8000]   start/restart develop_server.sh'
	@echo '   make stopserver              stop local server'
	@echo '   make ssh_upload              upload the web site via SSH'
	@echo '   make rsync_upload            upload the web site via rsync+ssh'
	@echo '   make dropbox_upload          upload the web site via Dropbox'
	@echo '   make ftp_upload              upload the web site via FTP'
	@echo '   make s3_upload               upload the web site via S3'
	@echo '   make cf_upload               upload the web site via Cloud Files'
	@echo '   make github                  upload the web site via gh-pages'
	@echo ''
	@echo 'Set the DEBUG variable to 1 to enable debugging, e.g. make DEBUG=1 html'
	@echo ''

html:
	$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)

clean:
	[ ! -d $(OUTPUTDIR) ] || rm -rf $(OUTPUTDIR)

regenerate:
	$(PELICAN) -r $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)

serve:
ifdef PORT
	cd $(OUTPUTDIR) && $(PY) -m pelican.server $(PORT)
else
	cd $(OUTPUTDIR) && $(PY) -m pelican.server
endif

devserver:
ifdef PORT
	$(BASEDIR)/develop_server.sh restart $(PORT)
else
	$(BASEDIR)/develop_server.sh restart
endif

stopserver:
	kill -9 `cat pelican.pid`
	kill -9 `cat srv.pid`
	@echo 'Stopped Pelican and SimpleHTTPServer processes running in background.'

publish:
	$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(PUBLISHCONF) $(PELICANOPTS)

ssh_upload: publish
	scp -P $(SSH_PORT) -r $(OUTPUTDIR)/* $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR)

rsync_upload: publish
	rsync -e "ssh -p $(SSH_PORT)" -P -rvz --delete $(OUTPUTDIR)/ $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR) --cvs-exclude

dropbox_upload: publish
	cp -r $(OUTPUTDIR)/* $(DROPBOX_DIR)

ftp_upload: publish
	lftp ftp://$(FTP_USER)@$(FTP_HOST) -e "mirror -R $(OUTPUTDIR) $(FTP_TARGET_DIR) ; quit"

s3_upload: publish
	s3cmd sync $(OUTPUTDIR)/ s3://$(S3_BUCKET) --acl-public --delete-removed

cf_upload: publish
	cd $(OUTPUTDIR) && swift -v -A https://auth.api.rackspacecloud.com/v1.0 -U $(CLOUDFILES_USERNAME) -K $(CLOUDFILES_API_KEY) upload -c $(CLOUDFILES_CONTAINER) .

github: publish
	ghp-import $(OUTPUTDIR)
	git push origin gh-pages

.PHONY: html help clean regenerate serve devserver publish ssh_upload rsync_upload dropbox_upload ftp_upload s3_upload cf_upload github

content/extra/robots.txt (new file, 0 lines)

content/posts/awstats-and-lighttpd.rst (new file, 57 lines)
@@ -0,0 +1,57 @@
awstats and lighttpd
####################
:date: 2012-02-06 13:40
:tags: awstats, lighttpd, graphite

These snippets are in a "*worked* for me" state. Most of this stuff will
break your system when executed.

Assumptions:

- **/srv/http/euer.krebsco.de** - served by lighttpd on the public
  interface
- **/srv/http/priv** - served on the private interface (darknet)

lighttpd separate subdomain logging and awstats
-----------------------------------------------

.. code-block:: bash

    #?/bin/sh
    apt-get install python-django python-cairo
    sudo easy_install django-tagging

    pip install carbon
    pip install whisper
    pip install graphite-web
    cd /opt/graphite/conf
    cp carbon.conf.example carbon.conf
    cp graphite.wsgi.example graphite.wsgi
    cp storage-schemas.conf.example storage-schemas.conf
    cd ..
    cp examples/example-graphite-vhost.conf /etc/apache2/sites-enabled/000-default.conf
    chown www-data:www-data -R storage/ webapp/
    cd webapp/graphite
    cp local_settings.py.example local_settings.py
    python manage.py syncdb
    python /opt/graphite/bin/carbon-cache.py start
    /etc/init.d/apache2 restart
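
The block above sets up graphite; the per-subdomain access log that awstats
expects below is lighttpd's job. A minimal sketch of that part (my assumption,
not taken from the original snippet; adjust to your lighttpd config layout):

.. code-block:: bash

    #?/bin/sh
    mkdir -p /var/log/lighttpd/euer.krebsco.de
    cat >> /etc/lighttpd/lighttpd.conf <<'EOF'
    server.modules += ( "mod_accesslog" )
    $HTTP["host"] == "euer.krebsco.de" {
        accesslog.filename = "/var/log/lighttpd/euer.krebsco.de/access.log"
    }
    EOF
    /etc/init.d/lighttpd restart
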
awstats for subdomain
---------------------

.. code-block:: bash

    #?/bin/sh
    apt-get install awstats
    cat > /etc/awstats/awstats.euer.krebsco.de.conf <<EOF
    LogFile="/var/log/lighttpd/euer.krebsco.de/access.log"
    SiteDomain="euer.krebsco.de"
    LogFormat=1
    Include "/etc/awstats/awstats.conf.local"
    EOF
    ln -s /usr/share/awstats/icon /srv/http/priv/awstats-icon
    cp /usr/lib/cgi-bin/awstats.pl /srv/http/priv/awstats
    awstats -config=euer.krebsco.de -update

content/posts/custom-cd-images-on-sansa-u3.rst (new file, 44 lines)
@@ -0,0 +1,44 @@
Custom CD Images on Sansa U3
############################
:date: 2012-02-13 14:00
:tags: usb,iso,autostart

I'm using a Sandisk Cruzer 8GB SDCZ36-008G-E11 (not B35) to deploy ISOs
on the virtual CD-ROM drive (U3 Smart).

write iso
---------
Some computers do better with a **real** cd-rom drive when booting a live
system.

.. code-block:: bash

    #?/bin/sh
    dmesg | grep CD-ROM
    u3-tool -i /dev/sdx1
    # resize cd-drive to N blocks
    u3-tool -p 7065646592 /dev/sdx1
    u3-tool -l debian.iso /dev/sdx1

create own iso
--------------
In some cases you want to write a **special** iso to the usb stick, for example
to automate some tasks on a friend's computer or to use the great autostart
feature, which is only available for cd-rom drives but not usb sticks.

.. code-block:: bash

    #?/bin/sh
    mkdir myiso
    cat << EOF > myiso/autorun.inf
    [autorun]
    action=Open folder to view files
    shellexecute=calc.exe
    icon=folder.ico
    EOF
    wget folder.ico calc.exe
    mkisofs -V FreeStuff -J -r -o my.iso myiso
    u3-tool -l my.iso /dev/sdx1

See also http://forums.hak5.org/index.php?showtopic=17267 for a sweet
USB Switchblade (pyblade).

content/posts/daemonize-and-autostart-under-debian-and-rhel.rst (new file, 205 lines)
@@ -0,0 +1,205 @@
Daemonize and Autostart under Debian and RHEL
#############################################

:date: 2012-04-05 11:57
:tags: debian,redhat,autostart

Daemonizing and autostarting a process is still a pain in the ass, so
here are two scripts which can be placed under /etc/init.d and, if you
are lucky, everything will work.

The example is the punani backend, a universal package resolver and installer
which is essentially a Python web server (now obsolete).

debian init-script
==================

.. code-block:: bash

    #! /bin/sh
    # uses template from /etc/init.d/skeleton
    ### BEGIN INIT INFO
    # Provides:          punani
    # Required-Start:
    # Required-Stop:
    # Default-Start:     2 3 4 5
    # Default-Stop:      0 1 6
    # Short-Description: punani
    # Description:       starts punani daemon
    #
    ### END INIT INFO

    PATH=/sbin:/usr/sbin:/bin:/usr/bin
    NAME=punani
    DESC="$NAME daemon"
    DAEMON=/usr/bin/python
    DAEMON_ARGS="/krebs/punani/index.py"
    PIDFILE=/var/run/$NAME.pid
    SCRIPTNAME=/etc/init.d/$NAME

    [ -x "$DAEMON" ] || exit 0
    [ -r /etc/default/$NAME ] && . /etc/default/$NAME
    . /lib/init/vars.sh
    . /lib/lsb/init-functions

    do_start()
    {
        # 0 if daemon has been started
        # 1 if daemon was already running
        # 2 if daemon could not be started
        start-stop-daemon -b --start --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \
            || return 1
        start-stop-daemon -b --start --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON -- \
            $DAEMON_ARGS \
            || return 2
    }

    do_stop()
    {
        # 0 if daemon has been stopped
        # 1 if daemon was already stopped
        # 2 if daemon could not be stopped
        start-stop-daemon --stop --retry=TERM/30/KILL/5 --pidfile $PIDFILE
        RETVAL="$?"
        [ "$RETVAL" = 2 ] && return 2
        rm -f $PIDFILE
        return "$RETVAL"
    }

    do_reload() {
        start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE
        return 0
    }

    case "$1" in
      start)
        [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
        do_start
        case "$?" in
            0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
            2)   [ "$VERBOSE" != no ] && log_end_msg 1 ;;
        esac
        ;;
      stop)
        [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
        do_stop
        case "$?" in
            0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
            2)   [ "$VERBOSE" != no ] && log_end_msg 1 ;;
        esac
        ;;
      status)
        status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
        ;;
      restart|force-reload)
        log_daemon_msg "Restarting $DESC" "$NAME"
        do_stop
        case "$?" in
          0|1)
            do_start
            case "$?" in
                0) log_end_msg 0 ;;
                1) log_end_msg 1 ;;
                *) log_end_msg 1 ;;
            esac
            ;;
          *)
            # Failed to stop
            log_end_msg 1
            ;;
        esac
        ;;
      *)
        echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2
        exit 3
        ;;
    esac

    :

register the script
-------------------

.. code-block:: bash

    update-rc.d punani defaults
    service punani start
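
To verify the registration actually took (my addition, not part of the original
post), the rc symlinks and the pidfile are quick things to check:

.. code-block:: bash

    #?/bin/sh
    ls /etc/rc2.d/ | grep punani   # an S??punani symlink should exist
    service punani status
    cat /var/run/punani.pid
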
RHEL Init Script
================

.. code-block:: bash

    #!/bin/bash
    # `forked` (read stolen) from http://bitten.edgewall.org/wiki/BittenSlaveDaemonRedhat
    #
    # processname: punani
    # config: /krebs/punani/config.json
    # pidfile: /var/run/punani.pid
    # chkconfig: 2345 99 01
    # description: punani daemon

    # Source function library.
    . /etc/rc.d/init.d/functions

    PATH=/sbin:/usr/sbin:/bin:/usr/bin
    DESC="punani daemon"
    NAME=punani
    DAEMON=/usr/bin/python
    DAEMON_ARGS="/krebs/punani/index.py"
    DAEMON_USER=nobody
    PIDFILE=/var/run/$NAME.pid

    [ -x "$DAEMON" ] || exit 0

    [ -r /etc/sysconfig/$NAME ] && . /etc/sysconfig/$NAME

    start() {
        echo -n $"Starting $NAME: "
        daemon --user="$DAEMON_USER" --pidfile="$PIDFILE" "$DAEMON $DAEMON_ARGS &"  # daemonize here
        RETVAL=$?
        pid=`ps -A | grep $NAME | cut -d" " -f2`
        pid=`echo $pid | cut -d" " -f2`
        if [ -n "$pid" ]; then
            echo $pid > "$PIDFILE"
        fi
        echo
        return $RETVAL
    }

    stop() {
        echo -n $"Stopping $NAME: "
        killproc -p "$PIDFILE" -d 10 "$DAEMON"
        RETVAL="$?"
        echo
        [ $RETVAL = 0 ] && rm -f "$PIDFILE"
        return "$RETVAL"
    }

    case "$1" in
      start)
        start
        ;;
      stop)
        stop
        ;;
      restart)
        stop
        start
        ;;
      *)
        echo "Usage: $NAME {start|stop|restart}" >&2
        exit 1
        ;;
    esac

    exit $RETVAL

register RHEL init-config
-------------------------

.. code-block:: bash

    chkconfig punani on
    service punani start
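
The same sanity check on the RHEL side (again my addition; note that this
script has no ``status`` action, so checking the pidfile directly is easiest):

.. code-block:: bash

    #?/bin/sh
    chkconfig --list punani        # runlevels 2-5 should be "on"
    cat /var/run/punani.pid
    ps -p $(cat /var/run/punani.pid)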

content/posts/dropbear.rst (new file, 16 lines)
@@ -0,0 +1,16 @@
Dropbear Public-Key Authentication
##################################
:date: 2012-08-07 00:00
:tags: openssh, dropbear

ssh-copy-id does not work out of the box for dropbear.
The issue is that dropbear only knows a single authorized_keys file, while
openssh keeps one per user.
To fix it, symlink the root user's authorized_keys file to the dropbear one.

.. code-block:: bash

    openwrt>> ln -s /root/.ssh/authorized_keys /etc/dropbear/
    remote>>> ssh-copy-id root@openwrt

There, you fixed it.
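
If ssh-copy-id still refuses to cooperate, appending the key by hand does the
same job (my sketch, assuming the usual key path on the local machine):

.. code-block:: bash

    remote>>> cat ~/.ssh/id_rsa.pub | ssh root@openwrt 'cat >> /etc/dropbear/authorized_keys'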

content/posts/enable-xattr-for-dumb-filesystems.rst (new file, 51 lines)
@@ -0,0 +1,51 @@
Enable xattr for dumb filesystems
#################################

:date: 2012-05-04 15:17
:tags: xattr,filesystems,davfs

I was looking for a way to enable extended attributes for encfs (userland
crypto wrapper) on davfs (userland WebDAV fs wrapper) to use them with
glusterfs and create a high-availability, distributed, secure cloud storage
on the cheap.

After many hours looking for a way to enable xattrs on encfs or ecryptfs
and davfs or wdfs, I found pyfilesystem, which lets you write and mount an
xattr wrapper around the dumb filesystem.


install pyfilesystem and encfs davfs
====================================

.. code-block:: bash

    pip install fs
    apt-get install davfs2 encfs

mount davfs and encfs
=====================

.. code-block:: bash

    #?/bin/sh
    echo "https://path/to/webdav username password" >> /etc/davfs2/secrets
    mkdir /mnt/{1,2,3}
    mkdir /mnt/1/.encfs
    mount.davfs https://path/to/webdav /mnt/1
    encfs /mnt/1/.encfs /mnt/2


mount wrapper fs
================

.. code-block:: python

    #?/usr/bin/python
    from fs.osfs import OSFS
    from fs.xattrs import SimulateXAttr
    from fs.expose import fuse   # import was missing in the original snippet
    stupid_fs = OSFS("/mnt/2")
    xattr_fs = SimulateXAttr(stupid_fs)
    fuse.mount(xattr_fs, "/mnt/3")


Now /mnt/3 can be used as a brick in glusterfs. Or just use tahoe-lafs ;P
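
For completeness, a rough sketch of using that mount as a brick (my addition;
the volume name and the second host are made up):

.. code-block:: bash

    #?/bin/sh
    gluster peer probe otherhost
    gluster volume create cheapcloud replica 2 thishost:/mnt/3 otherhost:/mnt/3
    gluster volume start cheapcloud
    mount -t glusterfs thishost:/cheapcloud /mnt/cloud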

content/posts/ftp-share-with-python-on-windows.rst (new file, 50 lines)
@@ -0,0 +1,50 @@
FTP Share with Python on Windows
################################
:date: 2012-03-06 14:34
:tags: ftp,python,windows

Installation of dependencies
============================

.. code-block:: bat

    #! cmd.exe
    wget http://www.python.org/ftp/python/2.7.2/python-2.7.2.msi
    msiexec python-2.7.2.msi
    # get easy_install
    wget http://pypi.python.org/packages/2.7/s/setuptools/setuptools-0.6c11.win32-py2.7.exe
    .\setuptools-0.6c11.win32-py2.7.exe

    cd c:\Python27\Scripts
    easy_install pywin32
    easy_install pyftpdlib


anon_serv.py
============
In addition to serving anonymous FTP, the script copies the current hostname
to the clipboard so it can be shared via instant messenger.

.. code-block:: python

    #!/usr/bin/python
    import socket
    fullhn = socket.getfqdn()
    print("My Hostname: %s" % fullhn)

    import win32clipboard as w
    import win32con
    w.OpenClipboard()
    w.EmptyClipboard()
    w.SetClipboardData(win32con.CF_TEXT, fullhn)
    w.CloseClipboard()

    from pyftpdlib import ftpserver
    authorizer = ftpserver.DummyAuthorizer()
    authorizer.add_anonymous("C:\\ftp", perm="elradfmw")
    handler = ftpserver.FTPHandler
    handler.authorizer = authorizer
    address = ("0.0.0.0", 21)
    ftpd = ftpserver.FTPServer(address, handler)
    ftpd.serve_forever()

content/posts/getting-hama-nano-dvb-t-stick-to-work.rst (new file, 34 lines)
@@ -0,0 +1,34 @@
Getting Hama Nano DVB-T Stick to work
#####################################
:date: 2012-04-12 12:43
:tags: dvb-t, rtl2832u

I initially bought it because I wanted to play around with software
defined radio on the cheap [#]_ but haven't had the time. As this thingy
is originally a DVB-T stick, I wanted to try that out first. As it
contains a fairly common RTL2832U chip, it shouldn't be that much of a
problem. It turns out it is ...

This pseudo-script runs under Arch Linux.

install and configure the Hama Nano DVB-T Stick
===============================================

.. code-block:: bash

    #?/bin/sh
    yaourt -S dvb-usb-rtl2832u-openpli
    modprobe dvb_usb_rtl2832u
    pacman -S linuxtv-dvb-apps
    # find a good place for the antenna
    scan /usr/share/dvb/dvb-t/de-Berlin | tee ~/.mplayer/channels.conf

    # you can also use the most current sender file from:
    # wget -O de-Berlin http://wiki.ubuntuusers.de/_attachment?target=dvb-utils%2Fchannels.conf%28Berlin%29
    # scan de-Berlin | tee ~/.mplayer/channels.conf

    mplayer "dvb://Das Erste"
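
If watching works, recording the raw transport stream is only one step further
(a sketch of mine, not from the original post):

.. code-block:: bash

    #?/bin/sh
    mplayer -dumpstream -dumpfile tagesschau.ts "dvb://Das Erste"
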
If you do not live in Berlin (duh), have a look through the /usr/share/dvb/dvb-t
folder or at http://wiki.ubuntuusers.de/dvb-utils#Basisdaten for more accurate
results.

.. [#] http://hardware.slashdot.org/story/12/03/31/1914217/software-defined-radio-for-11

@@ -0,0 +1,64 @@
install graphite+web with apache2 and collectd
##############################################
:date: 2012-06-01 10:40
:tags: apache, collectd, graphite

After some trial and error, this is how I got graphite and graphite_web
running under a Debian derivative (Ubuntu 12.04).

.. code-block:: bash

    #?/bin/sh
    apt-get install python-django python-cairo
    sudo easy_install django-tagging

    pip install carbon
    pip install whisper
    pip install graphite-web
    cd /opt/graphite/conf
    cp carbon.conf.example carbon.conf
    cp graphite.wsgi.example graphite.wsgi
    cp storage-schemas.conf.example storage-schemas.conf
    cd ..
    cp examples/example-graphite-vhost.conf /etc/apache2/sites-enabled/000-default.conf
    chown www-data:www-data -R storage/ webapp/
    cd webapp/graphite
    cp local_settings.py.example local_settings.py
    python manage.py syncdb
    python /opt/graphite/bin/carbon-cache.py start
    /etc/init.d/apache2 restart


See http://geek.michaelgrace.org/2011/09/how-to-install-graphite-on-ubuntu/
for a bloated version of the installation.

configure bucky and collectd
============================

.. code-block:: bash

    #?/bin/sh
    aptitude install collectd
    pip install bucky

    cat >>/etc/collectd/collectd.conf <<EOF
    LoadPlugin "network"
    <Plugin "network">
      Server "127.0.0.1" "25826"
    </Plugin>
    EOF

    /etc/init.d/collectd restart

    cat >>/etc/supervisor/conf.d/bucky.conf <<EOF
    [program:bucky]
    command=/usr/local/bin/bucky
    redirect_stderr=true
    user=nobody
    autorestart=true
    EOF

    supervisorctl reread
    supervisorctl update
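
To check that datapoints actually reach carbon, a hand-written metric through
the plaintext listener is the quickest test (my addition; assumes the default
carbon line receiver on port 2003):

.. code-block:: bash

    #?/bin/sh
    echo "test.manual 42 $(date +%s)" | nc -q1 localhost 2003
    # the metric should appear in the graphite-web tree after a few seconds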

content/posts/make-this-blog-post-happen.md (new file, 66 lines)
@@ -0,0 +1,66 @@
Title: Make this blog post happen
Date: 2012-02-01 13:20
Slug: make-this-blog-post-happen

recursive:

    :::bash
    #?/bin/sh
    bash -s stable < <(curl -s https://raw.github.com/wayneeseguin/rvm/master/binscripts/rvm-installer)
    echo '[[ -s $HOME/.rvm/scripts/rvm ]] && source $HOME/.rvm/scripts/rvm' >> ~/.zshrc
    source ~/.zshrc
    rvm install 1.9.2 && rvm use 1.9.2
    rvm rubygems latest
    gem install bundler
    git clone git://github.com/imathis/octopress.git octopress
    cd octopress
    bundle install
    rake install
    rake new_post["Make this blog post happen"]
    vim source/_posts/2012-02-01-make-this-blog-post-happen.markdown
    rake generate

Disclaimer
==========

Well, this is my first post. I will post code I am working with here.

Most of the code snippets will be pseudo-code (tagged by the hash questionmark
`#?`). The code can be seen as a digest of the `history` command of my shell
or my text editor.

Be sure not to simply copy-paste my stuff, as it will most likely break:
I haven't tested it myself after writing this up, even though it *looks*
correct ;).

I will mostly not describe what this stuff does beyond the headline and
probably some tags, as I think code is the only thing that matters in the end;
everything else can be read up somewhere else.

I guess this blog is somewhat like `Gist` or `Command Line Kung Fu`, but only
containing stuff important for me and my work.

content/posts/openssl-csr-with-subject-alternative-names.md (new file, 44 lines)
@@ -0,0 +1,44 @@
Title: OpenSSL CSR with Subject Alternative Names
Date: 2012-02-07 09:54
Slug: openssl-csr-with-subject-alternative-names

SAN in CSR:

    :::bash
    #?/bin/sh
    cat > my.cnf <<EOF
    [ req ]
    default_bits       = 2048
    default_keyfile    = privkey.pem
    distinguished_name = req_distinguished_name
    req_extensions     = req_ext # The extensions to add to the self signed cert

    [ req_distinguished_name ]
    countryName                 = Country Name (2 letter code)
    countryName_default         = DE
    stateOrProvinceName         = State or Province Name (full name)
    stateOrProvinceName_default = Upper Corner
    localityName                = Locality Name (eg, city)
    localityName_default        = Internet
    organizationName            = Organization Name (eg, company)
    organizationName_default    = Krebs Co
    commonName                  = Common Name (eg, YOUR name)
    commonName_default          = euer.krebsco.de
    commonName_max              = 64

    [ req_ext ]
    subjectAltName = @alt_names

    [alt_names]
    DNS.1 = euer.krebsco.de
    DNS.2 = euer
    EOF
    openssl req -new -nodes -out my.csr -config my.cnf
    openssl req -noout -text -in my.csr
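
One gotcha worth noting (my addition, not in the original snippet): `openssl
x509 -req` drops the CSR extensions unless they are passed again, so a quick
self-signed test certificate needs the config once more:

    :::bash
    #?/bin/sh
    openssl x509 -req -in my.csr -signkey privkey.pem -out my.crt -extensions req_ext -extfile my.cnf
    openssl x509 -in my.crt -noout -text | grep -A1 "Subject Alternative Name"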

content/posts/recover-softraidlvm.md (new file, 100 lines)
@@ -0,0 +1,100 @@
Title: Recover Softraid/LVM
Date: 2012-02-06 10:24
Slug: recover-softraidlvm

MD Array fails to assemble
--------------------------

Find the problem:

    :::bash
    #?/bin/sh
    cat /proc/mdstat
    mdadm -D --scan
    mdadm -E --scan
    mdadm -E /dev/sd[abcdef]1

Try to assemble manually:

    :::bash
    #?/bin/sh
    mdadm --stop /dev/md{0,127}
    mdadm --assemble /dev/md0 /dev/sd{a,b,c,d,e,f}1 --force

Recover a failed device in the array:

    :::bash
    #?/bin/sh
    mdadm /dev/md0 --re-add /dev/sdx1 # will likely fail
    mdadm --zero-superblock /dev/sdx1 # reap the device
    echo 200000 > /proc/sys/dev/raid/speed_limit_min # speed up recovery
    mdadm /dev/md0 --add /dev/sde1
    sleep 56000 && echo "FINISHED!"
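
Instead of the blind `sleep`, watching the resync directly is nicer (my
addition):

    :::bash
    #?/bin/sh
    watch -n5 cat /proc/mdstat
    mdadm --detail /dev/md0 | grep -i rebuild
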
Recover LVM after doing something stupid
----------------------------------------

Restore VolGroup partitioning:

    :::bash
    #?/bin/sh
    # imagine you did something like 'vgremove vg'
    vgdisplay -v # > logical volume: empty :(
    # lvm stores backup of partitioning, yay
    vgcfgrestore -f /etc/lvm/archive/vg_00018-2146062166.vg -v vg
    vgchange -ay
    vgdisplay -v # > logical volume: files1
    e2fsck /dev/vg/files1
    mount /dev/vg/files1

content/posts/smime-and-mutt.md (new file, 88 lines)
@@ -0,0 +1,88 @@
Title: S/MIME and Mutt
Date: 2012-02-01 16:32
Slug: smime-and-mutt

This is the PoC shell code for exchanging encrypted mails with MS Outlook.

enable smime for mutt:

    :::bash
    #?/bin/sh
    echo "source /usr/share/doc/mutt/samples/smime.rc" >> ~/.muttrc
    smime_keys init
    # create private CA and derive mail certificate (see below)
    # OR
    # get free trusted certificate from
    #   http://www.comodo.com/home/email-security/free-email-certificate.php
    smime_keys add_p12 mail.p12
    echo 'set smime_default_key="<see output above>"' >> ~/.muttrc
    wget http://services.support.alcatel-lucent.com/PKI/rootCA.crt
    smime_keys add_root rootCA.crt
    mutt
    # receive signed mail of crypto partner
    ## CTRL-K
    # fix the ~/.smime/certificates/.index as extraction of complete chains
    # does not work correctly as of today (31.01.2012), see Mutt #3559

Create own CA:

    :::bash
    mkdir ca
    openssl req -new -x509 -keyout ca/root_encrypted.key -out ca/root.pem -days 9001
    openssl rsa -in ca/root_encrypted.key > ca/root.key
    rm ca/root_encrypted.key
    cat > root.cnf <<'EOF'
    [ ca ]
    default_ca = ca_default

    [ ca_default ]
    dir              = ./ca
    certs            = $dir
    new_certs_dir    = $dir/ca.db.certs
    database         = $dir/ca.db.index
    serial           = $dir/ca.db.serial
    RANDFILE         = $dir/ca.db.rand
    certificate      = $dir/ca.crt
    private_key      = $dir/ca.key
    default_days     = 365
    default_crl_days = 30
    default_md       = md5
    preserve         = no
    policy           = generic_policy

    [ generic_policy ]
    countryName            = optional
    stateOrProvinceName    = optional
    localityName           = optional
    organizationName       = optional
    organizationalUnitName = optional
    commonName             = supplied
    emailAddress           = optional
    EOF
    echo '100001' > ca/ca.db.serial
    touch ./ca/ca.db.index
    mkdir ./ca/ca.db.certs
    openssl req -new -keyout mail.key -out mail.csr -days 9001
    openssl ca -config root.cnf -out mail.crt -infiles mail.csr
    openssl pkcs12 -export -inkey mail.key -certfile ca/root.crt -out mail.p12 -in mail.crt
    smime_keys add_root ca/root.crt
    smime_keys add_cert ca/root.crt
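
A quick sanity check of the generated bundle before feeding it to mutt (my
addition):

    :::bash
    #?/bin/sh
    openssl pkcs12 -in mail.p12 -info -noout
    openssl x509 -in mail.crt -noout -subject -dates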

content/posts/utf8-irssi-madness.md (new file, 35 lines)
@@ -0,0 +1,35 @@
Title: Utf8 in an Irssi/tmux/putty/windows Stack
Date: 2012-06-22
Tags: irssi, utf8

Getting irssi running with utf8 support in a putty/tmux stack is madness. Here
is what you have to do.

add lines in .{ba,z}shrc:

    :::bash
    export LANG=en_US.utf8
    export LC_ALL=en_US.utf8

add lines in .tmux.conf:

    :::bash
    set-option -g default-terminal "rxvt"
    set-window-option -g utf8 on

in irssi:

    :::bash
    /set term_charset UTF-8
    /set recode_autodetect_utf8 ON
    /set recode_fallback UTF-8
    /set recode ON
    /set recode_out_default_charset UTF-8
    /set recode_transliterate ON
    /save
    /quit

in putty config:

    window -> translation -> Received data assumed to be in which character set: UTF-8
    -> Use Unicode line drawing code points
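
A quick end-to-end check once everything is in place (my addition): print a
non-ASCII glyph inside tmux and make sure putty renders it.

    :::bash
    printf '\xe2\x98\xa0 utf8 works\n'   # should render a skull, not mojibake
    echo $LANG                           # must report en_US.utf8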

fabfile.py (new file, vendored, 60 lines)
@@ -0,0 +1,60 @@
from fabric.api import *
import fabric.contrib.project as project
import os

# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path

# Remote server configuration
production = 'root@localhost:22'
dest_path = '/var/www'

# Rackspace Cloud Files configuration settings
env.cloudfiles_username = 'my_rackspace_username'
env.cloudfiles_api_key = 'my_rackspace_api_key'
env.cloudfiles_container = 'my_cloudfiles_container'


def clean():
    if os.path.isdir(DEPLOY_PATH):
        local('rm -rf {deploy_path}'.format(**env))
        local('mkdir {deploy_path}'.format(**env))

def build():
    local('pelican -s pelicanconf.py')

def rebuild():
    clean()
    build()

def regenerate():
    local('pelican -r -s pelicanconf.py')

def serve():
    local('cd {deploy_path} && python -m SimpleHTTPServer'.format(**env))

def reserve():
    build()
    serve()

def preview():
    local('pelican -s publishconf.py')

def cf_upload():
    rebuild()
    local('cd {deploy_path} && '
          'swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
          '-U {cloudfiles_username} '
          '-K {cloudfiles_api_key} '
          'upload -c {cloudfiles_container} .'.format(**env))

@hosts(production)
def publish():
    local('pelican -s publishconf.py')
    project.rsync_project(
        remote_dir=dest_path,
        exclude=".DS_Store",
        local_dir=DEPLOY_PATH.rstrip('/') + '/',
        delete=True
    )

pelicanconf.py (new file, 31 lines)
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals

AUTHOR = 'makefu'
SITENAME = 'only code is pure'
SITEURL = 'http://syntax-fehler.de'
#SITESUBTITLE = 'A collection of pseudocode snippets'

TIMEZONE = 'Europe/Berlin'
THEME = './pelican-themes/gum/'
DEFAULT_LANG = 'en'
DEFAULT_CATEGORY = 'misc'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
STATIC_PATHS = [ 'extra/robots.txt', ]
EXTRA_PATH_METADATA = { 'extra/robots.txt': {'path': 'robots.txt'}, }

LINKS = (('exco\'s blog', 'http://excogitation.de'),
         ('Binaergewitter', 'http://krepel.us'),)

# Social widget
SOCIAL = (('@makefoo', 'http://twitter.com/makefoo'),)
DEFAULT_PAGINATION = 10

# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
MENUITEMS = (('RSS', '/feeds/all.atom.xml'),)

publishconf.py (new file, 24 lines)
@@ -0,0 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals

# This file is only used if you use `make publish` or
# explicitly specify it as your config file.

import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *

SITEURL = 'http://syntax-fehler.de'
RELATIVE_URLS = True

FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'

DELETE_OUTPUT_DIRECTORY = True

# Following items are often useful when publishing

#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""