add initial content

This commit is contained in:
Peter Tillemans 2023-09-05 14:15:29 +02:00
parent 70dd997451
commit 681a9c58a7
33 changed files with 1259 additions and 0 deletions

config.toml
# The URL the site will be built for
base_url = "https://www.snamellit.com"
# Whether to automatically compile all Sass files in the sass directory
compile_sass = true
# Whether to build a search index to be used later on by a JavaScript library
build_search_index = true
theme = "blow"
[markdown]
# Whether to do syntax highlighting
# Theme can be customised by setting the `highlight_theme` variable to a theme supported by Zola
highlight_code = true
[extra]
enable_search = true
enable_sidebar = true
enable_adsense = true
enable_multilingue = true
adsense_link = "https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=myclientid"
[extra.lang]
items = [
{ lang = "en", links = [
{ base_url = "/", name = "English" },
{ base_url = "/fr", name = "French" },
] },
{ lang = "fr", links = [
{ base_url = "/", name = "Anglais" },
{ base_url = "/fr", name = "Français" },
] },
]
[extra.navbar]
items = [
{ lang = "en", links = [
{ url = "/", name = "Home" },
{ url = "/categories", name = "Categories" },
{ url = "/tags", name = "Tags" },
] },
{ lang = "fr", links = [
{ url = "/fr", name = "Accueil" },
{ url = "/fr/categories", name = "Categories" },
{ url = "/fr/tags", name = "Tags" },
] },
]
title = "title"
[extra.sidebar]
items = [
{ lang = "en", links = [
{ url = "/markdown", name = "Markdown" },
{ url = "/blog", name = "Blog" },
] },
{ lang = "fr", links = [
{ url = "/fr/markdown", name = "Markdown" },
{ url = "/fr/blog", name = "Blog" },
] },
]
# Index page
[extra.index]
title = "Snamellit BV"
image = "/images/mugshot.jpg"
image_alt = "Profile picture"
[extra.default_author]
name = "Peter Tillemans"
avatar = "/images/mugshot.jpg"
avatar_alt = "Profile picture"
[extra.social]
github = "https://github.com/ptillemans"
gitlab = "https://gitlab.com/ptillemans"
twitter = ""
mastodon = "https://social.snamellit.com/users/snamellit"
linkedin = "https://www.linkedin.com/in/ptillemans/"
stackoverflow = "https://stackoverflow.com/users/350890/peter-tillemans"
email = "pti@snamellit.com"
telegram = "https://t.me/snamellit"
[extra.favicon]
favicon = "/icons/favicon.ico"
favicon_16x16 = "/icons/favicon-16x16.png"
favicon_32x32 = "/icons/favicon-32x32.png"
apple_touch_icon = "/icons/apple-touch-icon.png"
android_chrome_512 = "/icons/android-chrome-512x512.png"
android_chrome_192 = "/icons/android-chrome-192x192.png"
manifest = "/icons/site.webmanifest"

content/_index.md
---
title: "Snamellit"
---
# Welcome to Snamellit

content/about.md
---
title: "Snamellit"
date: "2023-09-05"
---
# Welcome to Snamellit

content/blog/_index.md
---
title: "Snamellit"
---
# Welcome to Snamellit

---
title: 'Adding audit information to entities in the Seam framework.'
date: 2008-11-11T13:15:51.000Z
draft: false
---
Since we write a lot of software for the production side of the business, we need to comply with rules that allow effective verification that processes and procedures have been followed. In the automotive industry people take these measures seriously.
One requirement we often get is that every configuration data element must record who created it and when, and the same for the last update.
Since the advent of EJB3 and JPA we have nice POJO entities which clearly show the structure of the data, and the last thing we want is to add a bunch of boilerplate code that multiplies the line count with redundant stuff.
This [blogpost by Daniel Pfeifer](http://www.mindbug.org/2008/04/automatic-tracing-of-entity-changes.html) shows how to do this using an entity listener. However, it does not work with Seam, since we cannot get the user information through the EJBContext, at least not on JBoss 4.2.3.
After wasting the better part of an afternoon barking up the wrong tree, the answer came to me the next morning during breakfast. I have the unhealthy habit of reading PDF files between bites, in this case chapter 15 of the Seam manual: the security section. (Yes, I know, I should have done this sooner. You know: "if all else fails, read the manual.")
Seam provides a utility class aptly named _org.jboss.seam.security.Identity_ which provides all we need to get the name of the user logged in to the current session. After replacing the implementation of _getCallerIdentity()_ with the following, we have nice audit trails:
> /**
>  * Get the name of the currently logged in user.
>  *
>  * This part contains the framework specific code to get the active username.
>  * In this case we use the Seam provided Identity.instance() to get at this
>  * information.
>  *
>  * @param t the entity being audited
>  * @return the name of the currently active user, or "unknown"
>  */
> private String getCallerIdentity(Traceable t) {
>    try {
>      // get the identity principal to get information about the current active user
>      Principal principal = Identity.instance().getPrincipal();
>      if (log.isDebugEnabled())
>        log.debug("AuditLogger.getCallerIdentity returned " + principal.getName());
>
>      return principal.getName();
>
>    } catch (Exception e) {
>      log.error("Exception received : " + e.getClass().getName() + ":" + e.getMessage());
>      return "unknown";
>    }
> }
I provided both the interface and mapped superclass implementations for convenience. All that now remains is to extend our configuration POJOs from the TraceableEntity class (and create an update script, and modify the UI to expose the audit info, ...) to make our data auditable.
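For readers who want the shape of the whole thing, here is a minimal sketch of such a mapped superclass, assuming JPA lifecycle callbacks; the field names and the inlined user lookup are my own simplifications, not the exact code from this post:

```
import java.util.Date;
import javax.persistence.MappedSuperclass;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.jboss.seam.security.Identity;

// Hypothetical sketch: audit columns maintained via JPA lifecycle callbacks.
@MappedSuperclass
public abstract class TraceableEntity {

    private String createdBy;
    @Temporal(TemporalType.TIMESTAMP)
    private Date createdOn;
    private String updatedBy;
    @Temporal(TemporalType.TIMESTAMP)
    private Date updatedOn;

    @PrePersist
    public void beforePersist() {
        createdOn = new Date();
        createdBy = currentUser();
        beforeUpdate();
    }

    @PreUpdate
    public void beforeUpdate() {
        updatedOn = new Date();
        updatedBy = currentUser();
    }

    // The real code delegates to the getCallerIdentity() shown above;
    // inlined here to keep the sketch self-contained.
    private String currentUser() {
        try {
            return Identity.instance().getPrincipal().getName();
        } catch (Exception e) {
            return "unknown";
        }
    }

    // getters and setters omitted for brevity
}
```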

---
title: 'Building debian packages in a cleanroom'
date: 2011-07-14T16:19:00.000Z
draft: false
---
### Overview and Goals
We build our solutions mostly on Ubuntu Natty and we deploy to Debian (currently lenny). One problem we face is that Debian has a slow release cycle and its packages are dated. Before a new release is approved and deployed to our target servers many more months can pass, leaving us with technology up to three years old. So we are often forced to backport packages, or to debianize existing packages, if we want to use the current releases. In the past we had different build servers for the target architectures, but this is a heavy solution that scales poorly and makes upgrading to the next release that much harder. So we need a system for building debian packages that is:
1. Fully automated
2. Able to target multiple distributions: Debian (lenny, squeeze) and Ubuntu (natty, maverick)
3. Able to build on (a) development machines and (b) Jenkins/Hudson CI servers
4. Easily configurable
5. Based on a memorizable process
The goal is to make packages for internal consumption, and the process outlined here falls short of the community standards.
### Enter pbuilder
Of course we are not the first or only ones with this issue. In fact we are laggards, and there are excellent articles on the 'net to help us with these goals, e.g.
* [PBuilder User Manual](http://www.netfort.gr.jp/~dancer/software/pbuilder-doc/pbuilder-doc.html)
* [Pbuilder Tricks on the Debian Wiki](http://wiki.debian.org/PbuilderTricks)
* [PBuilder How To over at the Ubuntu Wiki](https://wiki.ubuntu.com/PbuilderHowto)
The **pbuilder** program creates a cleanroom environment of a freshly installed, empty debian or ubuntu distro, chroots into it and starts building based on the project metadata, mostly from the **debian/control** file. It does this by unpacking a preconfigured **base** image of the selected target, installing the build dependencies, building the package in the cleanroom, moving the artifacts to the hosting machine and cleaning everything up again. It does all this surprisingly fast. This clearly satisfies goals 1 and 2 (and half of 3, if we assume a developer has full control over his laptop). **pbuilder** is configured through commandline options, which are clear and friendly enough, but you end up with commandlines several lines long which are impossible to type in a shell and are a maintenance nightmare in build scripts (clearly conflicting with goal 5). Also, in an ideal world we would be able to retarget a build without touching the checked out files, e.g. with environment variables (see goals 3 and 4).
### Configuring pbuilder
On the Pbuilder Tricks page I found a big, smart shell script to use as the **pbuilder** configuration file **~/.pbuilderrc**:
# Codenames for Debian suites according to their alias. Update these when needed.
UNSTABLE_CODENAME="sid"
TESTING_CODENAME="wheezy"
STABLE_CODENAME="squeeze"
OLDSTABLE_CODENAME="lenny"
STABLE_BACKPORTS_SUITE="$STABLE_CODENAME-backports"

# List of Debian suites.
DEBIAN_SUITES=($UNSTABLE_CODENAME $TESTING_CODENAME $STABLE_CODENAME $OLDSTABLE_CODENAME
    "unstable" "testing" "stable" "oldstable")

# List of Ubuntu suites. Update these when needed.
UBUNTU_SUITES=("natty" "maverick" "jaunty" "intrepid" "hardy" "gutsy")

# Mirrors to use. Update these to your preferred mirror.
DEBIAN_MIRROR="ftp.be.debian.org"
UBUNTU_MIRROR="mirrors.kernel.org"

# Optionally use the changelog of a package to determine the suite to use if
# none set.
if [ -z "${DIST}" ] && [ -r "debian/changelog" ]; then
    DIST=$(dpkg-parsechangelog | awk '/^Distribution: / {print $2}')
    # Use the unstable suite for Debian experimental packages.
    if [ "${DIST}" == "experimental" ]; then
        DIST="unstable"
    fi
fi

# Optionally set a default distribution if none is used. Note that you can set
# your own default (i.e. ${DIST:="unstable"}).
: ${DIST:="$(lsb_release --short --codename)"}

# Optionally set the architecture to the host architecture if none set. Note
# that you can set your own default (i.e. ${ARCH:="i386"}).
: ${ARCH:="$(dpkg --print-architecture)"}

NAME="$DIST"
if [ -n "${ARCH}" ]; then
    NAME="$NAME-$ARCH"
    DEBOOTSTRAPOPTS=("--arch" "$ARCH" "${DEBOOTSTRAPOPTS[@]}")
fi
BASETGZ="/var/cache/pbuilder/$NAME-base.tgz"
DISTRIBUTION="$DIST"
BUILDRESULT="/var/cache/pbuilder/$NAME/result/"
APTCACHE="/var/cache/pbuilder/$NAME/aptcache/"
BUILDPLACE="/var/cache/pbuilder/build/"

# make sure folders exist
mkdir -p $BUILDRESULT
mkdir -p $APTCACHE
echo "Target : $BUILDRESULT" >>/tmp/dist

if $(echo ${DEBIAN_SUITES[@]} | grep -q $DIST); then
    OTHERMIRROR="deb file:///var/cache/pbuilder/$NAME/result ./"
    BINDMOUNTS="/var/cache/pbuilder/$NAME/result"
    HOOKDIR="/var/cache/pbuilder/$NAME/hooks"
    EXTRAPACKAGES="apt-utils"
    # Debian configuration
    MIRRORSITE="http://$DEBIAN_MIRROR/debian/"
    COMPONENTS="main contrib non-free"
    DEBOOTSTRAPOPTS=("${DEBOOTSTRAPOPTS[@]}" "--keyring=/usr/share/keyrings/debian-archive-keyring.gpg")
    if $(echo "$STABLE_CODENAME stable" | grep -q $DIST); then
        EXTRAPACKAGES="$EXTRAPACKAGES debian-backports-keyring"
        OTHERMIRROR="$OTHERMIRROR | deb http://www.backports.org/debian $STABLE_BACKPORTS_SUITE $COMPONENTS"
    fi
elif $(echo ${UBUNTU_SUITES[@]} | grep -q $DIST); then
    # Ubuntu configuration
    MIRRORSITE="http://$UBUNTU_MIRROR/ubuntu/"
    COMPONENTS="main restricted universe multiverse"
    DEBOOTSTRAPOPTS=("${DEBOOTSTRAPOPTS[@]}" "--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg")
else
    echo "Unknown distribution: $DIST"
    exit 1
fi
I just updated the distribution names to the current situation and added the directory where the packages are collected as a repository, so subsequent builds can use these packages as dependencies. I also specified the keyrings to use for Debian and Ubuntu and made sure the expected folders are created so they can be mounted into the cleanroom. I created this file in my account on my development laptop and added a symbolic link to it from ~root/.pbuilderrc, so I can update it from my desktop environment and do not have to remember which configuration I am busy with in my shell, sudo, **su -**, ... The way the script works is that the configuration adapts itself to the content of the **DIST** and **ARCH** environment variables. So to target **lenny-amd64** it is sufficient to do:
~ > export DIST=lenny
~ > export ARCH=amd64
This approach is also perfect for Jenkins or Hudson to determine the build target from the checked out sources, since it can be specified in the build recipe (satisfying goals 3b, 4 and 5). Since we have to run these programs using sudo, we must make sure the environment variables are passed through by sudo. We can do this in the **Defaults** line of the **/etc/sudoers** file with the **env_keep** instruction:
...
Defaults env_reset,env_keep="DIST ARCH http_proxy ftp_proxy https_proxy no_proxy"
... snip ...
# Cmnd alias specification
Cmnd_Alias PBUILDER=/usr/sbin/pbuilder, /usr/bin/pdebuild
... snip to end of file ...
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL
pti ALL=(ALL) NOPASSWD: PBUILDER
jenkins ALL=(ALL) NOPASSWD: PBUILDER
#includedir /etc/sudoers.d
You add the **DIST** and **ARCH** variables there. I also included the environment variables for proxying, so I can easily switch between environments on my laptop and have these changes propagate to sudo (which is also useful for plain apt-get, by the way). I also added a line showing how to make the tools available to a user without asking for a password. This is not needed for interactive work, but very much so for the user the CI server runs as (in our case **jenkins**). Note that the definition should come after the group definitions, otherwise those take precedence and jenkins has to provide its password (read: the build hangs).
### Creating the target base images
The heavy lifting is now done. Let's create a **base.tgz** for lenny-amd64:
~ > export DIST=lenny
~ > export ARCH=amd64
~ > sudo pbuilder create
Now go and have a cup of coffee (or read some emails). Rinse and repeat for the other target platforms.
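To prepare every target in one go, a small loop does the trick. A minimal sketch, assuming the ~/.pbuilderrc above is installed and the sudoers env_keep line passes DIST and ARCH through:

```
#!/bin/sh
# Hypothetical helper: create base images for all targets we care about.
# Adjust the suite and architecture lists to your own targets.
for DIST in lenny squeeze maverick natty; do
    for ARCH in i386 amd64; do
        export DIST ARCH
        sudo pbuilder create
    done
done
```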
### Backporting existing packages
In theory, backporting would be as simple as:
~ ᐅ cd tmp
~/tmp ᐅ apt-get source mongodb
Reading package lists... Done
Building dependency tree
Reading state information... Done
Need to get 1,316 kB of source archives.
Get:1 http://be.archive.ubuntu.com/ubuntu/ natty/universe mongodb 1:1.6.3-1ubuntu2 (dsc) [2,276 B]
Get:2 http://be.archive.ubuntu.com/ubuntu/ natty/universe mongodb 1:1.6.3-1ubuntu2 (tar) [1,285 kB]
Get:3 http://be.archive.ubuntu.com/ubuntu/ natty/universe mongodb 1:1.6.3-1ubuntu2 (diff) [29.0 kB]
Fetched 1,316 kB in 1s (679 kB/s)
gpgv: Signature made Thu 17 Mar 2011 11:49:37 PM CET using RSA key ID D5946E0F
gpgv: Can't check signature: public key not found
dpkg-source: warning: failed to verify signature on ./mongodb_1.6.3-1ubuntu2.dsc
dpkg-source: info: extracting mongodb in mongodb-1.6.3
dpkg-source: info: unpacking mongodb_1.6.3.orig.tar.gz
dpkg-source: info: unpacking mongodb_1.6.3-1ubuntu2.debian.tar.gz
dpkg-source: info: applying debian-changes-1:1.6.3-1
dpkg-source: info: applying build-process-remove-rpath
dpkg-source: info: applying mozjs185
~/tmp ᐅ DIST=lenny ARCH=amd64 sudo pbuilder build mongodb_1.6.3-1ubuntu2.dsc
I: using fakeroot in build.
I: Current time: Thu Jul 14 14:28:17 CEST 2011
I: pbuilder-time-stamp: 1310646497
I: Building the build Environment
I: extracting base tarball [/var/cache/pbuilder/lenny-amd64-base.tgz]
...
and you should get a nice set of debian packages in **/var/cache/pbuilder/lenny-amd64/result**. In practice you will often end up with errors like:
... snip ...
The following packages have unmet dependencies:
pbuilder-satisfydepends-dummy: Depends: xulrunner-dev (>= 2.0~) but it is not installable
The following actions will resolve these dependencies:
Remove the following packages:
pbuilder-satisfydepends-dummy
Score is -9850
Writing extended state information... Done
... snip ...
I: cleaning the build env
I: removing directory /var/cache/pbuilder/build//6279 and its subdirectories
In these cases you have to walk the dependency tree until you find the leaves, then walk back up the branches to the trunk. Note also that, unless you target machines which serve a very specific purpose, chances are you end up with uninstallable packages because you pull the rug out from under other installed packages. However, our principle is to use one virtual host to deliver one service, so very few packages are deployed to them and nothing as complicated as a desktop environment. Simple leaf packages often build without a hitch:
~/tmp ᐅ DIST=lenny sudo pbuilder build libevent_1.4.13-stable-1.dsc
I: using fakeroot in build.
I: Current time: Thu Jul 14 14:44:00 CEST 2011
I: pbuilder-time-stamp: 1310647440
I: Building the build Environment
I: extracting base tarball [/var/cache/pbuilder/lenny-amd64-base.tgz]
I: creating local configuration
I: copying local configuration
I: mounting /proc filesystem
I: mounting /dev/pts filesystem
I: Mounting /var/cache/pbuilder/ccache
... snip ...
dpkg-genchanges: including full source code in upload
dpkg-buildpackage: full upload (original source is included)
W: no hooks of type B found -- ignoring
I: Copying back the cached apt archive contents
I: unmounting /var/cache/pbuilder/lenny-amd64/result filesystem
I: unmounting /var/cache/pbuilder/ccache filesystem
I: unmounting dev/pts filesystem
I: unmounting proc filesystem
I: cleaning the build env
I: removing directory /var/cache/pbuilder/build//15214 and its subdirectories
I: Current time: Thu Jul 14 14:49:57 CEST 2011
I: pbuilder-time-stamp: 1310647797
~/tmp ᐅ ls -al /var/cache/pbuilder/lenny-amd64/result
total 5260
drwxr-xr-x 2 root root 4096 2011-07-13 19:47 .
drwxr-xr-x 5 root root 4096 2011-07-13 19:13 ..
-rw-r--r-- 1 pti pti 2853 2011-07-14 14:49 libevent_1.4.13-stable-1_amd64.changes
-rw-r--r-- 1 pti pti 9129 2011-07-14 14:49 libevent_1.4.13-stable-1.diff.gz
-rw-r--r-- 1 pti pti 907 2011-07-14 14:49 libevent_1.4.13-stable-1.dsc
-rw-r--r-- 1 pti pti 499603 2009-12-05 23:04 libevent_1.4.13-stable.orig.tar.gz
-rw-r--r-- 1 pti pti 61956 2011-07-14 14:49 libevent-1.4-2_1.4.13-stable-1_amd64.deb
-rw-r--r-- 1 pti pti 31262 2011-07-14 14:49 libevent-core-1.4-2_1.4.13-stable-1_amd64.deb
-rw-r--r-- 1 pti pti 172950 2011-07-14 14:49 libevent-dev_1.4.13-stable-1_amd64.deb
-rw-r--r-- 1 pti pti 51588 2011-07-14 14:49 libevent-extra-1.4-2_1.4.13-stable-1_amd64.deb
-rw-r--r-- 1 root root 9051 2011-07-14 14:48 Packages
~/tmp ᐅ
### Using **pdebuild** for building packages
Many of our packages are debianized and can be built using **debuild**. I use the Ubuntu sources of tokyocabinet as an example here (which uses the libevent package we just built, by the way):
~/tmp/tokyocabinet-1.4.37 ᐅ DIST=lenny ARCH=amd64 pdebuild
...snip...
dpkg-genchanges >../tokyocabinet_1.4.37-6ubuntu1_amd64.changes
dpkg-genchanges: not including original source code in upload
dpkg-buildpackage: binary and diff upload (original source NOT included)
W: no hooks of type B found -- ignoring
I: Copying back the cached apt archive contents
I: unmounting /var/cache/pbuilder/lenny-amd64/result filesystem
I: unmounting /var/cache/pbuilder/ccache filesystem
I: unmounting dev/pts filesystem
I: unmounting proc filesystem
I: cleaning the build env
I: removing directory /var/cache/pbuilder/build//4199 and its subdirectories
I: Current time: Thu Jul 14 15:05:27 CEST 2011
I: pbuilder-time-stamp: 1310648727
~/tmp/tokyocabinet-1.4.37 ᐅ ls /var/cache/pbuilder/lenny-amd64/result
...snip...
tokyocabinet_1.4.37-6ubuntu1_amd64.changes
tokyocabinet_1.4.37-6ubuntu1.debian.tar.gz
tokyocabinet_1.4.37-6ubuntu1.dsc
tokyocabinet_1.4.37.orig.tar.gz
tokyocabinet-bin_1.4.37-6ubuntu1_amd64.deb
tokyocabinet-doc_1.4.37-6ubuntu1_all.deb
Sometimes the dependencies break on the required version of debhelper. This version requirement is added conservatively by the dh* scripts and is often overly strict; many packages build just fine with older versions of debhelper.
### Setting up automated build
To set this up on the build server we have to replicate the steps above (a sketch of a matching Jenkins build step follows the list):
1. Create the ~/.pbuilderrc file
2. Symbolic link to this file in ~root/.pbuilderrc
3. Allow jenkins to use sudo for building packages
4. Create a jenkins job to (re)create the base images
5. Create jobs to build the packages
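The build step itself can stay small. A minimal sketch, assuming a parameterized Jenkins job with DIST and ARCH as build parameters and a debianized source tree checked out in the workspace (the paths and parameter names are assumptions):

```
#!/bin/sh
# Hypothetical Jenkins shell build step.
export DIST=${DIST:-lenny}
export ARCH=${ARCH:-amd64}
cd "$WORKSPACE"
# pdebuild invokes pbuilder via sudo; the sudoers entry above makes
# this password-less for the jenkins user.
pdebuild
# keep a copy of the resulting packages with the build
mkdir -p "$WORKSPACE/artifacts"
cp /var/cache/pbuilder/$DIST-$ARCH/result/*.deb "$WORKSPACE/artifacts/"
```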

---
title: 'Deliverables and Activities Ahah!! Moment'
date: 2008-07-15T01:32:39.000Z
draft: false
---
Something I already knew suddenly became clear today: the product or product pieces in the WBS, and their relation to the activities. Although I have known for a long time that it is good practice to make the WBS deliverable-oriented instead of activity-oriented, it always remained a gradient where activities blended seamlessly into deliverables. The key was a mental trick derived from 'Getting Things Done', which says that you must write your activities action-oriented: with a verb, active voice, i.e. do something. I was already rephrasing activities this way (more bang per spreadsheet cell). Now I applied the same reasoning to the WBS work packages, but rewrote them as things, or parts of things. Again the clarity improved considerably and the wordiness went down. And then: klabammm... flash of light: activities were clearly separated from the WBS work packages, and the grey area between them was gone! I am quite sure that if I read my PM books again I will find this trick in every single one, but I had to "invent" it myself before I understood and felt it, instead of just knowing it.

---
title: 'Disable authentication for global proxy settings on Ubuntu'
date: 2011-07-22T14:33:00.000Z
draft: false
---
Ubuntu has a Network Proxy chooser which allows you to select a location (a la MacOSX). This works well enough, except that the UI is a bit counter-intuitive (in my humble opinion), which causes me to regularly nuke some predefined setting inadvertently. That is not a big deal. However, for update manager (and several other tools) to pick up the new proxy settings you need to push the settings down to the system level, and this requires typing your password twice. Now, this IS a big deal: going back and forth between work and home, I have to change this at least twice a day.

It also irks me that a detail like the proxy is not auto-detected and that I need to authenticate to change this 'system' setting. My laptop is essentially a single user system and I do not see switching the proxy as a serious security issue, even with 3 kids running around the home. As for auto-detection: while it works fine at work, it fails to figure out that at home there is a direct connection to the Internet. I could probably fix that by replacing my aging wireless router with my Time Capsule as the Internet gateway router, but I prefer to have the Time Capsule close to my desk.

In any case, the **Network proxy** tool shows the authentication dialog twice. A particularly nice feature (is this new in Natty?) is that the dialog shows for which DBUS setting access is being asked. The first dialog asks access to **com.ubuntu.systemservice.setProxy**. This response is configured in the file **/usr/share/polkit-1/actions/com.ubuntu.systemservice.policy**, a very readable XML file which contains a section for the **setProxy** action. I feel no reservation in allowing unchecked access to **setProxy**: although this might make a man-in-the-middle attack easier, someone with the sophistication to pull that off does not need to doctor my PC to do it.
<action id="com.ubuntu.systemservice.setproxy">
  <description>Set current global proxy</description>
  <message>System policy prevents setting proxy settings</message>
  <defaults>
    <!-- PTI : original settings
    <allow_inactive>no</allow_inactive>
    <allow_active>auth_admin_keep</allow_active>
    -->
    <allow_inactive>yes</allow_inactive>
    <allow_active>yes</allow_active>
  </defaults>
</action>
Retrying, and indeed one of the authentication requests dropped. Note that the action was configured with **auth_admin_keep**, which according to the docs means we should stay authenticated for some time, so I would not expect the second authentication prompt I am getting. There must be a subtlety that escapes me at the moment. The second action is more problematic, since **set-system** on the system gconf settings is much less fine-grained than **setProxy** and can potentially cause more damage to the system.
<action id="org.gnome.gconf.defaults.set-system">
  <description gettext-domain="GConf2">Change GConf system values</description>
  <message gettext-domain="GConf2">Privileges are required to change GConf system values</message>
  <defaults>
    <allow_inactive>no</allow_inactive>
    <!-- PTI: Original setting
    <allow_active>auth_admin</allow_active>
    -->
    <allow_active>yes</allow_active>
  </defaults>
</action>
After relaxing this second method, I can finally switch proxies easily between Locations. Several things still bug me:
1. The set-system method really is too wide in scope.
2. There should be a more elegant way than modifying files under **/usr/share**.
3. My system should actually switch location unaided.
For the time being, I fixed a frustration and learned something in the process. The result is not yet fully satisfactory, but that will improve over time.
* [Tutorial explaining editing static files](http://ubuntu.paslah.com/policykit/)
* [pklocalauthority man page](http://hal.freedesktop.org/docs/polkit/pklocalauthority.8.html)
* [Install Updates as Unprivileged User](http://forums.opensuse.org/english/get-technical-help-here/applications/433124-install-updates-unprivileged-user.html)
Of course there is a more elegant solution than editing files under the **/usr/share** folder. Everything is explained in the man pages for **pklocalauthority** and **PolicyKit.conf** (among others). But that's for another day...
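As a sketch of that more elegant route: the local authority reads .pkla files from /etc/polkit-1/localauthority/, so something along these lines should grant the same access without touching /usr/share. This is an untested assumption pieced together from the pklocalauthority man page; the file name and user are mine:

```
# /etc/polkit-1/localauthority/50-local.d/10-allow-proxy.pkla (hypothetical)
[Allow pti to change proxy settings without authentication]
Identity=unix-user:pti
Action=com.ubuntu.systemservice.setproxy;org.gnome.gconf.defaults.set-system
ResultActive=yes
```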

---
title: 'Eclipse Ganymede crashes on 64-bit Ubuntu Hardy Herron'
date: 2008-07-12T06:56:04.000Z
draft: false
---
In java6 there is a bug in the 64-bit linux version of the jvm which causes eclipse to crash when opening projects with certain additional plugins installed. Both the openjdk and the sun versions are affected. For more info see:
* [Bug #206620 in openjdk-6 (Ubuntu)](https://bugs.launchpad.net/ubuntu/+source/openjdk-6/+bug/206620)
* [Bug#478560: sun-java6-jdk: SIGSEGV when loading Eclipse workspace](http://groups.google.com/group/linux.debian.bugs.dist/browse_thread/thread/d081a6b4113c9ee4)
This has been fixed in openjdk7, but it might take a while before that shows up in the repositories. In the meantime, jdk5 does not have this problem and works perfectly well with eclipse. So a quick
sudo aptitude install sun-java5-jdk
installs the needed software. However, if java6 is still installed, chances are it will be preferred over the java5 version of the programs. There are two ways to make sure eclipse uses java5.

**Use java5 for ALL programs.** To change the preference, tell the alternatives system to use the java5 version:
pti@pti-laptop:~$ sudo update-alternatives --config java
[sudo] password for pti:
There are 3 alternatives which provide `java'.
Selection Alternative
-----------------------------------------------
1 /usr/lib/jvm/java-6-sun/jre/bin/java
2 /usr/lib/jvm/java-1.5.0-sun/jre/bin/java
* 3 /usr/lib/jvm/java-6-openjdk/jre/bin/java
Press enter to keep the default[*], or type selection number: 2
Using '/usr/lib/jvm/java-1.5.0-sun/jre/bin/java' to provide 'java'.
To choose the alternatives with a GUI it is a good idea to install **galternatives**.

**Tell only eclipse to use java5.** Since the bug only shows up on my system (so far) in eclipse, I prefer the speedier java6 as the default. Luckily we can tell eclipse to use a specific jvm in the **eclipse.ini** file. Open **eclipse.ini** in your eclipse home with your favorite editor and add the following 2 lines to the beginning of the file:
-vm
/usr/lib/jvm/java-1.5.0-sun/jre/bin/java
This will tell the eclipse launcher to use java5 to launch eclipse.
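For reference, the top of the resulting eclipse.ini then looks something like this (the lines after the -vm pair are typical defaults, not necessarily yours); note that -vm and its path must be on two separate lines and must come before -vmargs:

```
-vm
/usr/lib/jvm/java-1.5.0-sun/jre/bin/java
-showsplash
org.eclipse.platform
-vmargs
-Xms40m
-Xmx512m
```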

---
title: 'Enable real idletime for Org Mode on Ubuntu'
date: 2011-07-04T09:11:00.000Z
draft: false
---
Org Mode can use idle time to correct time tracking entries. On the Mac this works off the idle time of the computer; on other platforms it uses the idle time of emacs. Hence, if you do a significant task in another program, org-mode considers you idle. A little program delivered with the org-mode sources estimates the "real" idle time based on the information used by screensavers. Unfortunately it ships as source code and needs to be compiled first. Everything is actually well documented, just scattered. Let's see where that little program is located:
$ locate x11idle
/home/pti/.emacs.d/vendor/org-mode/UTILITIES/x11idle.c
This conveniently shows the location of the source file. If you find an **x11idle** file (without .c) in a bin directory, it is already installed and you're done. If you get no results and there is some remark that the database is too old, refresh it using **sudo updatedb**; then the lookup should find the file. Now go to the folder where the file is located. In my case:
$ cd ~/.emacs.d/vendor/org-mode/UTILITIES
We need to make sure we have the necessary X screensaver libraries for compiling:
$ sudo apt-get install libxss-dev
Reading package lists... Done
Building dependency tree
Reading state information... Done
The following extra packages will be installed:
x11proto-scrnsaver-dev
The following NEW packages will be installed:
libxss-dev x11proto-scrnsaver-dev
0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
Need to get 34.1 kB of archives.
After this operation, 225 kB of additional disk space will be used.
Do you want to continue [Y/n]?
Get:1 http://be.archive.ubuntu.com/ubuntu/ natty/main x11proto-scrnsaver-dev all 1.2.1-1 [21.3 kB]
Get:2 http://be.archive.ubuntu.com/ubuntu/ natty/main libxss-dev amd64 1:1.2.1-1 [12.9 kB]
Fetched 34.1 kB in 0s (200 kB/s)
Selecting previously deselected package x11proto-scrnsaver-dev.
(Reading database ... 567603 files and directories currently installed.)
Unpacking x11proto-scrnsaver-dev (from .../x11proto-scrnsaver-dev_1.2.1-1_all.deb) ...
Selecting previously deselected package libxss-dev.
Unpacking libxss-dev (from .../libxss-dev_1%3a1.2.1-1_amd64.deb) ...
Processing triggers for man-db ...
Setting up x11proto-scrnsaver-dev (1.2.1-1) ...
Setting up libxss-dev (1:1.2.1-1) ...
Now we can compile **x11idle**. We need to link against the X11 and X Screensaver (Xss) libraries. I immediately place the resulting executable in **~/bin/x11idle**. Alternatively you could use the /usr/local/bin folder, but then first compile to a temporary location like **/tmp** and move it there: **sudo mv /tmp/x11idle /usr/local/bin**.
$ gcc -o ~/bin/x11idle -lX11 -lXss x11idle.c
x11idle.c: In function 'main':
x11idle.c:19:5: warning: format '%u' expects type 'unsigned int', but argument 2 has type 'long unsigned int'
$ x11idle
22
$ x11idle
20
I tried running it from the normal path and got 22 and 20 seconds of idle time: it works! The org-mode code will now find and use it. For more background info see
* [Blogpost about getting idle time in linux.](http://coderrr.wordpress.com/2008/04/20/getting-idle-time-in-unix/)
* [The message with the patch proposal for including it with org-mode](http://osdir.com/ml/emacs-orgmode-gnu/2009-10/msg00545.html)

---
title: 'Fix for Sonar choking on ''result returns more than one elements'''
date: 2011-06-23T12:54:05.000Z
draft: false
---
Recently our sonar installation on our Hudson CI choked again, this time with an error I had not seen before. It was just before the release of an important milestone for the team, so not being able to publish new versions on the test server could not have come at a worse time.
In the Console Log of a failing build we found the message:
```
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 5.791s
[INFO] Finished at: Thu Jun 23 09:20:05 CEST 2011
[INFO] Final Memory: 20M/429M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:sonar-maven-plugin:2.0-beta-2:sonar (default-cli) on project pfus: Can not execute Sonar: PicoLifecycleException: method 'public void org.sonar.batch.ProjectTree.start() throws java.io.IOException', instance 'org.sonar.batch.ProjectTree@50a6023a, java.lang.RuntimeException: wrapper: result returns more than one elements -> [Help 1]
```
Ok, apparently something returns two results where only one is expected, and it stands to reason that it comes from the database. Pity there is no indication of which table is affected.
Some googling turned up [this post](http://old.nabble.com/Can-not-execute-Sonar%3A-result-returns-more-than-one-elements-to31369733.html#a31369733), which points to the snapshots table, and more specifically to records with the field islast set to true.
A quick check in **.../conf/sonar.properties** revealed the database connection parameters.
Ok, this is a good time to check you have an up-to-date backup of your database. (Or make one if you're not sure)
Using your SQL query tool of choice (I used the built-in Data Sources tool in IntelliJ), connect to the database.
The snapshots table contains a project_id column which, confusingly, does not really reference projects but all kinds of assets, artefacts, files, whatever you like to call them. Unfortunately there are many thousands of them; even limited to rows with **islast=1** there were close to 1000 records.
Narrowing further down with :
> ```
> select project_id, cnt
> from (select project_id,
>              count(*) as cnt
>       from snapshots
>       where islast=1
>       group by project_id) as cntsnap
> where cnt > 1
> ```
gave me the project_id I had been looking for, in my case 2089. Now a quick
> select * from snapshots where project_id=2089
gave the two offending rows. A quick glance showed that one of them was very suspicious: its parent_id was the same as its own project_id and it had lots of null columns. I deleted that row based on its **id**, retriggered **hudson** to rebuild the project, and the build succeeded; sonar seems to be happy again. I hope there will be no other repercussions. For our purposes the sonar data is not really critical and we could also restart from scratch without much impact. If your sonar data is more critical, a better choice would be to restore from a backup taken before the builds started failing.
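For completeness, the cleanup then amounts to a one-liner of this shape, with the placeholder replaced by the id of the suspicious row found above (a sketch; take that backup first):
> delete from snapshots where id = <id-of-suspicious-row>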

content/blog/gallery.md
---
title: 'Gallery'
date: 2021-11-28T12:57:12.000Z
draft: false
---
Discover. Create. Experience.
=============================
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-1.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-2.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-3.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-4.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-5.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-6.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-7.jpg)
* ![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-8.jpg)
### Let me design your home
[Book a Consultation](http://www.snamellit.com/contact/)

---
title: 'Java is a First Class Citizen on Ubuntu Hardy Linux'
date: 2008-08-08T18:07:52.000Z
draft: false
---
Lately I hear a lot of Java bashing from a very vocal part of the Linux community. I do a lot of Java, I use Linux on my main laptop, and I like this just fine. I use freemind (a Java mindmapper) and openoffice (which is Java enabled) daily. I have been doing this for the past years, ever since Windows XP ate my C-drive for the N-th time (and now it has done the same on my gaming rig at home, grrr...).
I keep a 'Tools' directory in my home drive and place the Java tools and libraries I get from the net there. Creating a little bash script in *~/bin* which calls the startup script of the java tool almost always does the right thing, so I was contented. Well, now I need to do some LDAP hacking. The last times I did that in perl, python, ruby and Java, so it was groovy's turn.
On a whim I type :
pti@pti-laptop:~/workspace/salesforce$ sudo apt-get install groovy
and to my big surprise I get:
Reading package lists... Done
Building dependency tree
Reading state information... Done
The following extra packages will be installed:
antlr junit4 libasm2-java libbcel-java libbsf-java libclassworlds-java libcommons-cli-java
libcommons-collections3-java libcommons-lang-java libcommons-logging-java liblog4j1.2-java
libmockobjects-java libmx4j-java libregexp-java libservlet2.3-java libxpp3-java libxstream-java
Suggested packages:
groovy-doc libbcel-java-doc jython rhino libclassworlds-java-doc
libcommons-collections3-java-doc liblogkit-java libavalon-framework-java
libgnumail-java
Recommended packages:
liblog4j1.2-java-gcj
The following NEW packages will be installed:
antlr groovy junit4 libasm2-java libbcel-java libbsf-java
libclassworlds-java libcommons-cli-java libcommons-collections3-java
libcommons-lang-java libcommons-logging-java liblog4j1.2-java
libmockobjects-java libmx4j-java libregexp-java libservlet2.3-java
libxpp3-java libxstream-java
0 upgraded, 18 newly installed, 0 to remove and 0 not upgraded.
Need to get 7025kB of archives.
After this operation, 14.7MB of additional disk space will be used.
Do you want to continue [Y/n]?
Ok, this was not what I expected... Of course I type 'Y' and let it rip. Now my curiosity takes the upper hand and I try some more:
pti@pti-laptop:~/workspace/salesforce$ sudo apt-get install maven
Reading package lists... Done
Building dependency tree
Reading state information... Done
E: Couldn't find package maven
Tough luck. But wait, there are 2 mavens, let's try:
pti@pti-laptop:~/workspace/salesforce$ sudo apt-get install maven2
Reading package lists... Done
Building dependency tree
Reading state information... Done
The following extra packages will be installed:
ant libcommons-codec-java libcommons-collections-java
libcommons-httpclient-java libcommons-net-java libcommons-openpgp-java
libdoxia-java libganymed-ssh2-java libjdom0-java libjsch-java libjtidy-java
liblogkit-java liboro-java libplexus-classworlds-java
libplexus-component-api-java libplexus-container-default-java
libplexus-interactivity-api-java libplexus-utils-java
libplexus-velocity-java libslide-webdavclient-java libwagon-java
libwerken.xpath-java velocity
Suggested packages:
ant-doc libcommons-httpclient-java-doc libcommons-openpgp-java-doc
libdoxia-java-doc libjtidy-java-doc libgnumail-java
libplexus-classworlds-java-doc libplexus-component-api-java-doc
libplexus-container-default-java-doc libplexus-interactivity-api-java-doc
libplexus-utils-java-doc libplexus-velocity-java-doc libwagon-java-doc
velocity-doc
Recommended packages:
ant-optional ant-gcj
The following NEW packages will be installed:
ant libcommons-codec-java libcommons-collections-java
libcommons-httpclient-java libcommons-net-java libcommons-openpgp-java
libdoxia-java libganymed-ssh2-java libjdom0-java libjsch-java libjtidy-java
liblogkit-java liboro-java libplexus-classworlds-java
libplexus-component-api-java libplexus-container-default-java
libplexus-interactivity-api-java libplexus-utils-java
libplexus-velocity-java libslide-webdavclient-java libwagon-java
libwerken.xpath-java maven2 velocity
0 upgraded, 24 newly installed, 0 to remove and 0 not upgraded.
Need to get 6211kB of archives.
After this operation, 25.2MB of additional disk space will be used.
Do you want to continue [Y/n]?
Hmmmmm... this is better... There is some reference to ant... Yep, that too. Jedit, jython, jruby, scala (I have not done LDAP with scala yet...) follow in quick succession. Well, time to fire up synaptic and search for Java. A seemingly endless list of packages appears. App Servers:
* glassfish
* tomcat (actually I knew that from before)
* jetty
Dev Tools:
* Emma
* Junit
* Junit4
* JMock
* ...
A bunch of the commons libraries... Half a dozen JDKs. Java bindings for all the windowing toolkits out there: Gtk, Qt-jambi, ... I also finally found the JDBC driver for postgresql, which I was frantically looking for a couple of weeks ago and in the end downloaded from the postgres site. There is an old eclipse version, but that is a tool I prefer to keep in my Tools directory so I can update its plugins. The only thing I did not find was findbugs. There is probably a lot more missing, but my inspiration dried up. I could clear out half of my Tools folder because it is replaced by ubuntu versions. I am now even more contented than I was: I just love to type 'apt-get install <something>' when I want something, and now I found more of what I want.

---
title: 'Learnings from the PMI Benelux Day.'
date: 2008-09-28T00:29:48.000Z
draft: false
---
I went to the PMI Benelux Day today. The theme was _A Symphony of Knowledge_, which ran through the plenary session topics.

I chose to see the use of earned value techniques applied to the Galileo project, presented by Francois Picaut. It was interesting to see how the technique was applied in a quite straightforward manner, by simply entering the AC provided by the accountants and the EV provided by the products completed at roughly biweekly milestones. All other performance numbers were calculated from these. One learning was that for subcontracted work under an FFP contract, EV = AC. Once you think it over this is evident, but the penny took a while to drop. Some additional metrics were defined, like the ES (Earned Schedule, the time when the current EV should have been reached) and 'To Complete Performance Indexes' in their cost and scope variants. Apparently these metrics show the effectiveness of the project management when plotted over time. He applied EVT on FFP projects with good success as part of project assurance. One anecdote was the case where the project manager presented a 'project on track' report while the EV calculations showed the project would end with a EUR 1,000,000 loss. This triggered a discussion about the variances, which triggered corrective actions. As such it proved the value of the method to cross-check the results from bottom-up or other estimates.

In the area of risk management, Daniel vander Borcht presented a session about why the ABCD methodology works where so many other risk management approaches fail. ABCD stands for Assumption Based Communication Dynamics, and a key part is the central role of assumptions in the model: next to the classic issue register and risk register, an assumption register is introduced. I need to research this some more.

Jean Diederich presented the Test Monkeys and Banana Software talk. He made the case that thinking about testing still happens way too late in the project, and he proposes to involve testers at the earliest opportunity to help start the acceptance test design, release test design, ... in parallel with development rather than afterwards. This is the same story I heard from Suzanne Robertson a couple of weeks ago in the context of requirements gathering, and it confirms again my conviction that a good requirements process makes life in the project considerably easier.

The last presentation, by Hedda Pahlson-Muller, was about KPOs, or knowledge process outsourcing. This very instructive talk clarified this form of outsourcing: roughly, they give analysts and other knowledge workers in a company the possibility to have an external team do the legwork and provide collected data for further analysis by the company itself. Due to this business model they face a glass ceiling on their service: they cannot deliver analysis or recommendations. For that they are looking for PMs/consultants to use their experience to analyse and interpret the data for the companies that ask for it.

A nice, interesting day: bought a couple of books, talked PM with other people. Time well spent.

---
title: 'Logging Notification Messages in Ubuntu'
date: 2013-07-22T11:48:16.000Z
draft: false
---
Ubuntu contains a nice notification system to inform the user about noteworthy events. However, when the message disappears it is, by default, gone. Often I am busy with something else when the notification pops up and I pay little notice. Then somewhere in my brain something gets triggered by a word, and by the time I focus on the message to **really** read it, it has disappeared. Some of these appear right after booting or logging in, so it is not trivial to redisplay them. So I really need a system for logging notification messages, to read back what I missed. Ideally there would be a scroll-back buffer in the notification feature. Maybe there is, but I didn't find it.
Standard Notification Logging
-----------------------------
The developers provided a log in *~/.cache/notify-osd.log* (see the [NotifyOSD page on the Ubuntu wiki](https://wiki.ubuntu.com/NotifyOSD)). So this used to be a moot point, as a quick

$ tail ~/.cache/notify-osd.log

gave me what I needed. However, somewhere between 12.04 and 13.04 the default behavior changed to no longer log to this file; my last entry was from December 2012. Logging was considered a developer feature and was disabled for the general public, but it is controllable with an environment variable: LOG=1. To enable it globally, edit */etc/environment* and add LOG=1. Since this is such a vague variable name, I guess it will enable logging in more than just *notify-osd*; I am fine with that (at least until my disk runs out). The wiki, which actually contains a design document rather than documentation, makes no mention of it. Disabling the log is also not very logical, as the file is reset when logging in, so it should never pose a threat to disk space. In any case, I dropped a small note in the wiki to point users in the right direction.
Logging Notifications with an App
---------------------------------
There is also an app for that. The package indicator-notifications keeps track of the notifications you receive. You can install it with:

$ sudo add-apt-repository ppa:jconti/recent-notifications
$ sudo apt-get update
$ sudo apt-get install indicator-notifications

You'll have to log out and log back in. It shows up as a mailbox in the top panel and turns green when you get new messages. For more info about logging notifications see [this AskUbuntu question](http://askubuntu.com/questions/288348/how-can-i-read-notifyosd-messages-after-they-are-displayed).

---
title: 'Making dotFiles visible on the Mac'
date: 2011-07-20T20:56:00.000Z
draft: false
---
Dotfiles, can't live with 'em, can't live without 'em. Dot files is the term for folders and files starting with a '.', which do not show up when using plain **ls**. The Mac has a cool keycode to toggle the visibility of dotfiles in the **File Open/Save** dialog, but for one reason or another it does not work in the Finder. In practice this meant I had to deal with dotFiles and dotDirectories. I found on the net some snippets of sh script (I forgot where and cannot immediately retrieve it) to force the Finder setting to show or hide the dotfiles; upon restarting the Finder the windows reopen with the updated setting. I immediately dumped them in my **~/bin** folder. ~/bin/hide-dotfiles:
#!/bin/sh
defaults write com.apple.finder AppleShowAllFiles -bool FALSE
killall Finder
~/bin/show-dotfiles:
#!/bin/sh
defaults write com.apple.finder AppleShowAllFiles -bool TRUE
killall Finder
So in order to toggle the setting in the graphical Finder, you drop to the commandline, set the flag to the desired state, and restart the Finder. Works nicely. One remark: if you suddenly see strange icons on your desktop, you probably still have dotfile visibility enabled; **hide-dotfiles** will clean your desktop up again.
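The two scripts can also be folded into a single toggle. A minimal sketch; the script name toggle-dotfiles is my own, and it assumes the same defaults key as above:

```
#!/bin/sh
# ~/bin/toggle-dotfiles (hypothetical): flip Finder's dotfile visibility.
current=$(defaults read com.apple.finder AppleShowAllFiles 2>/dev/null)
if [ "$current" = "TRUE" ] || [ "$current" = "1" ]; then
    defaults write com.apple.finder AppleShowAllFiles -bool FALSE
else
    defaults write com.apple.finder AppleShowAllFiles -bool TRUE
fi
killall Finder
```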

---
title: 'OpenProj Locks up with Blank Grey Windows'
date: 2008-06-11T11:20:02.000Z
draft: false
---
The symptom is that after starting there is sometimes a blurb dialog, and afterwards the _**Tip of the Day**_ dialog appears. It stays grey and the application accepts no more events; you need to kill it to get out. This happens at the workplace, which is protected by a firewall and has no transparent proxy. At home it worked fine, albeit on my Macbook and not on my Ubuntu laptop. The reason is that there is some phone-home functionality built in: with wireshark I could see the application trying to connect to a webserver, probably to fetch the tips of the day. Behind the firewall this does not work and the application just hangs there. I suspect it would time out sooner or later, but I am not that patient. Since this is a regular occurrence with java applications, I immediately knew I had to tell it where the proxy can be found. In the file **~/.openproj/run.conf**, replace the line

JAVA_OPTS="-Xms128m -Xmx768m"

with

JAVA_OPTS="-Dhttp.proxyHost=proxy -Dhttp.proxyPort=3128 -Xms128m -Xmx768m"

This directs the java runtime library to use the proxy **http://proxy:3128/**. And voila! OpenProj starts immediately and in its full glory.

---
title: 'Organizing Windows using the Keyboard with Compiz Grid'
date: 2011-06-30T11:44:00.000Z
draft: false
---
The Mac has a great utility called Divvy to easily map windows to locations on the screen using the keyboard. Fiddling with the mouse to get multiple windows in the right place is a productivity killer and a pain in the neck (or shoulder, or elbow, or ...). Ubuntu (and of course any other Linux distro with compiz) has a similar feature built in as a compiz plugin. Type Alt-F2 and enter **ccsm** + Return at the command prompt to launch the CompizConfig Settings Manager. Select **Window Management** from the left menu and enable the **Grid** plugin. Click on it to look at the key bindings in the **Bindings** tab. If you are on a desktop or a big laptop with a separate numeric keypad you are set: the window locations are by default mapped logically to the arrow keys on the numeric keypad. However, my laptop does not have a separate numeric keypad, and enabling it before typing the key code is a pain. Remapping is easy: click on the button with the key code, and a window appears with a **Grab key code** button. Click it and type the new keycode you want to assign. If there is a conflict, you get a window explaining the conflict and asking how to resolve it. My first attempt was to remap the laptop numeric keys using the Super key; this conflicted with the unity launcher, since the top row 7-8-9 maps to the apps in the launcher. To avoid conflicts I now use Control-Super with the keys around the j-key (the home key for the right hand). Autokey (a text macro expander) is mapped to Super-K:
* Control-Super-j : 100% (maximize)
* Control-Super-h : 50% left
* Control-Super-k : 50% right
* Control-Super-u : 50% top
* Control-Super-m : 50% bottom
* Control-Super-y : 25% top-left
* Control-Super-i : 25% top-right
* Control-Super-n : 25% bottom-left
* Control-Super-, : 25% bottom-right
If Control-Super-j is used to maximize a window, pressing one of the other keys will first restore it to its original size and position, and only map it to its place on the second press. I consider this a feature, but you are free to interpret it as a bug. The result: a super practical way to divide the windows on my screen.

---
title: 'Proxy Support for Grails'
date: 2011-06-28T11:59:43.000Z
draft: false
---
Not a big thing; actually, for me it is. I am always struggling with proxy settings: a lot of different incantations are required, every program deals with it differently, support for platform settings is flaky, ... Grails deals with this in a way which pleasantly surprised me:
> grails add-proxy <name> --host=<hostname> --port=<portno>
(e.g. grails add-proxy client --host=proxy --port=3128)
allows you to create a setting for a proxy and bind it to a name. It also supports username/password. Switching to the setting involves only
> grails set-proxy client
to enable the proxy setting, and
> grails clear-proxy
when I get back to a transparent environment. (For completeness, there is also a **remove-proxy** command, useful to remove those passwords after the need has passed.) I am particularly impressed that this was done in a simple and straightforward way, without the need for brain gymnastics trying to remember which arcane curse needs to be put in which location in which file. Nice.

---
title: 'Uploading documents to Plone with WebDAV'
date: 2012-03-29T22:03:41.000Z
draft: false
---
Preparing **Plone** to start the WebDAV service and setting the permissions to allow users to make use of it is only half the battle; actually using it, especially from automated systems like build servers, is another struggle.
Using the _Cadaver_ WebDAV client
---------------------------------
Although WebDAV is currently well integrated in modern desktop environments, a CLI alternative is useful for automation, like _Jenkins_ build scripts.
### Automatic Login
Ideally we want password-less operation from scripts, both from a usability and from a security standpoint.
_Cadaver_ supports automatically logging in to servers requiring authentication via a .netrc file, like the _ftp_ client. The syntax is such that the file can be shared for both tools.
The file ~/.netrc may be used to automatically login to a server requiring authentication. The following tokens (separated by spaces, tabs or newlines) may be used:
#### machine host
Identify a remote machine host which is compared with the hostname given on the command line or as an argument to the open command. Any subsequent tokens up to the end of file or the next machine or default token are associated with this entry.
#### default
This is equivalent to the machine token but matches any hostname. Only one default token may be used and it must be after all machine tokens.
#### login _username_
Specifies the username to use when logging in to the remote machine.
#### password _secret_
Specifies the password to use when logging in to the remote machine. (Alternatively the keyword **passwd** may be used)
#### Example ~/.netrc file
```
default
login jenkins
passwd secret
```
#### Example Session
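A sketch of what such a session looks like, using the host from the troubleshooting examples below and a hypothetical file; thanks to the .netrc entry, no password is asked:

```
~ ᐅ cadaver dav://cmdb-uat.elex.be:1980/cmdb/Members/jenkins
dav:/cmdb/Members/jenkins/> put docs/index.html
Uploading docs/index.html to `/cmdb/Members/jenkins/index.html':
Progress: [=============================>] 100,0% of 2048 bytes succeeded.
dav:/cmdb/Members/jenkins/> bye
```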
### Troubleshooting
#### Error 409 Conflict
This can mean a lot of things, like a real conflict. However most of the time it means that the folder where the stuff is uploaded does not exist:
```
dav:/cmdb/Members/pti/> mput Dropbox/Apps/Byword/plone_webdav.md
Uploading Dropbox/Apps/Byword/plone_webdav.md to `/cmdb/...':
Progress: [=============================>] 100,0% of 1488 bytes failed:
409 Conflict
```
This actually tries to upload the file to a subfolder _Dropbox/Apps/Byword_ which does not exist, causing this confusing error.
Simply changing the local directory solves the issue:
```
dav:/cmdb/Members/pti/> lcd Dropbox/Apps/Byword
dav:/cmdb/Members/pti/> mput plone_webdav.md
Uploading plone_webdav.md to `/cmdb/Members/pti/plone_webdav.md':
Progress: [=============================>] 100,0% of 1488 bytes succeeded.
```
#### Cannot create folders using WebDAV
Problem: The WebDAV "make folder" method, MKCOL, requires the "Add Folders" permission. This is not normally granted to Members or Owners on the site.
```
dav:/cmdb/Members/jenkins/> mkcol test2
Creating `test2': Authentication required for Zope on server `cmdb-uat.elex.be':
Username:
Password:
Retrying: Authentication required for Zope on server `cmdb-uat.elex.be':
Username: Terminated by signal 2.
```
Plone asks to login again because the current user has insufficient rights.
Workaround: In the Zope Management Interface, under the "Security" tab for the Plone root, check the "Owners" and "Managers" box for the "Add Folders" permission setting.
```
~ ᐅ cadaver dav://cmdb-uat.elex.be:1980/cmdb/Members/jenkins
dav:/cmdb/Members/jenkins/> mkcol test2
Creating `test2': succeeded.
dav:/cmdb/Members/jenkins/>
```
Source: [Members Can't Create Folders Through WebDAV](http://plone.org/documentation/error/unable-to-create-a-folder-through-webdav)
Automating Cadaver with **davpush.pl**
--------------------------------------
Cadaver uses an ftp-like command language to interact with the WebDAV server. This is very flexible, but impractical when a large number of files and folders must be uploaded, which happens often when the documentation for a new release must replace the previous version.
Cadaver accepts its input on the **stdin** stream, which allows us to pipe a script of commands to it. Since it is non-trivial to create and maintain such a script by hand, a script generator is needed. The generator presented here is meant to be simple and easy to use and modify. No attempt was made to add advanced syncing (like removing deleted files), handle exceptions gracefully, or 'do the right thing'.
With that in mind, organize the docs in such a way that it is easy to delete the target folder and push a fresh copy to clean everything up. This is common (and good) practice anyway in order to effectively use relative links within a subsite.
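Cleaning up an old copy before a fresh push is then a single **rmcol** command in cadaver (a sketch; the _docs_ folder name is hypothetical and the exact output may differ):
```
~ ᐅ cadaver dav://cmdb-uat.elex.be:1980/cmdb/Members/pti
dav:/cmdb/Members/pti/> rmcol docs
Deleting `docs': succeeded.
dav:/cmdb/Members/pti/> bye
Connection to `cmdb-uat.elex.be' closed.
```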
The principle is to **cd** to the root directory of the documentation, run the script there, and point it to the target.
### Usage
```
davpush.pl dav://<hostname>:<port>/<upload path>
```
Uploads all files and folders recursively to the WebDAV folder passed in the url.
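In a _Jenkins_ shell build step this boils down to two lines (a sketch; the local `build/docs` output path and the target folder are hypothetical):
```
cd build/docs
perl davpush.pl dav://cmdb-uat.elex.be:1980/cmdb/Members/jenkins/docs
```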
### Code
```
#!/usr/bin/perl
use strict;
use warnings;
use File::Find;

my $script     = "";
my $target_dir = "";

# Called by File::Find for every file and folder in the tree.
sub wanted {
    my $f = $File::Find::name;
    if (-f $f) {
        # Files: upload with the path relative to the root intact.
        $script .= "put $f\n";
    } else {
        # Folders: go back to the target root and create the folder.
        $script .= "cd $target_dir\n";
        $script .= "mkdir $f\n";
        $script .= "cd $f\n";
    }
}

my $url = $ARGV[0] || '';
print "URL: $url\n";
if ($url =~ m#dav://.*?(/\S*)#) {
    $target_dir = $1;
    find({ 'wanted' => \&wanted, 'no_chdir' => 1 }, ".");
    open(my $pout, '|-', "cadaver $url")
        or die "Cannot start cadaver: $!";
    print $pout $script;
    print $pout "bye\n";
    close $pout;
} else {
    print "Usage: davpush.pl dav://<hostname>:<port>/<upload path>\n";
    print "\n";
    print "Uploads all files and folders recursively to the WebDAV folder passed in the url.\n";
}
```
### Code Notes
The standard perl **File::Find** module traverses the folder tree in the right order to make sure all folders are created before any files or folders are created inside them. The default behavior is to **chdir** into each directory, but then we lose the nice paths relative to the root, which would require additional bookkeeping when entering and leaving directories. Setting the **no\_chdir** flag in the options keeps the paths the way we want them in the script. (Look at the **preprocess** and **postprocess** options to help with the directory bookkeeping, but I think the added complexity would outweigh the gains for small to moderate trees.)
For every file or folder, the **wanted** subroutine is called. For files we just add a **put** command to copy the file over, because it keeps the path intact. If the file already exists (and the permissions are not screwed up) it is overwritten. When we enter a new folder we create it. If the folder already exists we get a (harmless) **405 Method Not Allowed** error. Here we make another offer to the _God of Simplicity_, and _ignore_ it.
After walking the tree, we have the script in the **$script** variable. It is unceremoniously piped as input to **cadaver**. We add the **bye** command to close the session, and we're done. The output of **cadaver** appears on **stdout** for easy verification with a _Mk I Eyeball_ check or by piping it to **grep**.
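For instance, to eyeball only the problems (a sketch; the harmless **405** lines for already-existing folders will also show up):
```
perl davpush.pl dav://cmdb-uat.elex.be:1980/cmdb/Members/pti | grep failed
```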
### Sample session
```
~/Dropbox/Apps/Byword ᐅ perl ~/tmp/davpush.pl dav://cmdb-uat.elex.be:1980/cmdb/Members/pti
URL: dav://cmdb-uat.elex.be:1980/cmdb/Members/pti
Creating `.': failed:
405 Method Not Allowed
Uploading ./plone_webdav.html to `/cmdb/Members/pti/plone_webdav.html':
Progress: [=============================>] 100,0% of 4007 bytes succeeded.
Uploading ./plone_webdav.md to `/cmdb/Members/pti/plone_webdav.md':
Progress: [=============================>] 100,0% of 6369 bytes succeeded.
Uploading ./Untitled.txt to `/cmdb/Members/pti/Untitled.txt':
Progress: [=============================>] 100,0% of 203 bytes succeeded.
Uploading ./Uploading to `/cmdb/Members/pti/Uploading': Could not open file: No such file or directory
Uploading documents to `/cmdb/Members/pti/documents': Could not open file: No such file or directory
Uploading to to `/cmdb/Members/pti/to': Could not open file: No such file or directory
Uploading Plone to `/cmdb/Members/pti/Plone': Could not open file: No such file or directory
Uploading with to `/cmdb/Members/pti/with': Could not open file: No such file or directory
Uploading WebDAV.md to `/cmdb/Members/pti/WebDAV.md': Could not open file: No such file or directory
Creating `./foo': failed:
405 Method Not Allowed
Creating `./foo/bar': failed:
405 Method Not Allowed
Creating `./foo/bar/baz': failed:
405 Method Not Allowed
Uploading ./foo/bar/baz/plone_webdav.md to `/cmdb/Members/pti/foo/bar/baz/plone_webdav.md':
Progress: [=============================>] 100,0% of 3380 bytes succeeded.
Creating `./images': failed:
405 Method Not Allowed
Uploading ./images/SJ09_1.jpg to `/cmdb/Members/pti/images/SJ09_1.jpg':
Progress: [=============================>] 100,0% of 31637 bytes succeeded.
Uploading ./images/SJ09_2.jpg to `/cmdb/Members/pti/images/SJ09_2.jpg':
Progress: [=============================>] 100,0% of 29182 bytes succeeded.
Uploading ./images/SJ09_3.jpg to `/cmdb/Members/pti/images/SJ09_3.jpg':
Progress: [=============================>] 100,0% of 31296 bytes succeeded.
Uploading ./images/SJ09_4.jpg to `/cmdb/Members/pti/images/SJ09_4.jpg':
Progress: [=============================>] 100,0% of 31094 bytes succeeded.
Uploading ./images/SJ09_5.jpg to `/cmdb/Members/pti/images/SJ09_5.jpg':
Progress: [=============================>] 100,0% of 26886 bytes succeeded.
Uploading ./images/SJ09_6.jpg to `/cmdb/Members/pti/images/SJ09_6.jpg':
Progress: [=============================>] 100,0% of 29373 bytes succeeded.
Uploading ./images/SJ09_7.jpg to `/cmdb/Members/pti/images/SJ09_7.jpg':
Progress: [=============================>] 100,0% of 34486 bytes succeeded.
Uploading ./images/SJ09_8.jpg to `/cmdb/Members/pti/images/SJ09_8.jpg':
Progress: [=============================>] 100,0% of 28561 bytes succeeded.
Uploading ./images/SJ09_9.jpg to `/cmdb/Members/pti/images/SJ09_9.jpg':
Progress: [=============================>] 100,0% of 27381 bytes succeeded.
Connection to `cmdb-uat.elex.be' closed.
```


@ -0,0 +1,113 @@
---
title: 'Word Problems, 5th Grade'
date: 2011-06-19T19:24:26.000Z
draft: false
---
Math Word Problems, 5th Grade
=============================
While preparing for the math tests at the end of the 5th grade, Hendrik ran short of practice problems: he already knew the answers by heart and no longer bothered with how the answers were obtained. So I made up a few extra ones that were very recognizable to him. He thought that was fantastic! At Hendrik's insistence I blogged these questions. I thought that was a splendid idea, since it guarantees I can find them again when Emma and Lotte need them. And in the meantime others can enjoy them too.
Problem 1
---------
Piet runs 800m in 2m 44.76s. Jan took 15.34s longer.
What was Jan's time?
Formula : .........................................................
Answer : .........................................................
...................................................................
Problem 2
---------
We drive from Antwerpen to the Kaunertal (840km) in 8 hours.
a) What was the average speed?
Formula : .........................................................
Answer : .........................................................
...................................................................
b) Along the way we stopped for 1 hour to eat. What was the average
speed while we were actually driving?
Formula : .........................................................
Answer : .........................................................
...................................................................
Problem 3
---------
Hendrik runs 3300m in 12 minutes. What was his average speed in km/h?
Formula : .........................................................
Answer : .........................................................
...................................................................
Problem 4
---------
A sports car drives 240km on a circuit in 1 hour.
A sailfish swims 60km in the sea in 1 hour.
The speeds of the car and the sailfish are in the ratio ..... to .....
Problem 5
---------
On Mondays Opa goes cycling with the retirees.
He leaves at 13h45 and rides a 60km loop. They took a half-hour
break in Postel to drink a Trappist.
If the retirees ride at an average of 20km/h, what time does Opa
get back?
Formula : .........................................................
Answer : .........................................................
...................................................................
Problem 6
---------
Find the probability:
We put all the chess pieces in a bag and shake it until they are
all thoroughly mixed. Lotte is blindfolded and draws one piece
from the bag.
What is the probability that it is:
```
|--------------------+---------------+----+--------------|
|                    |               |    |              |
| a white piece      | ............. | in | ............ |
|                    |               |    |              |
| a black pawn       | ............. | in | ............ |
|                    |               |    |              |
| a knight           | ............. | in | ............ |
|                    |               |    |              |
| a queen            | ............. | in | ............ |
|                    |               |    |              |
| the white king     | ............. | in | ............ |
|--------------------+---------------+----+--------------|
```

13
content/contact.md Normal file

@ -0,0 +1,13 @@
---
title: Contact Information
date: 2023-09-05
---
Peter Tillemans
Snamellit BV
Verbonstraat 57
2000 Antwerpen
VAT BE0892.789.787

33
content/home.md Normal file

@ -0,0 +1,33 @@
---
title: 'Home'
date: 2021-11-28T12:57:12.000Z
draft: false
---
Brilliant design.
Simplicity at scale.
========================================
![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-1.jpg)
![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-2.jpg)
I'm Aabha, I like simple and clean design.
------------------------------------------
[PORTFOLIO](http://www.snamellit.com/gallery/)
![Image description](http://www.snamellit.com/wp-content/uploads/2021/11/image-3.jpg)
Experience delicate and beautiful design
----------------------------------------
[CONSULTATIONS](http://www.snamellit.com/contact/)
> “We are so grateful we found Aabha. Having her navigate the project made all the difference.”
>
> Paul F - San Francisco
### Let me design your home
[Book a Consultation](http://www.snamellit.com/contact/)

3
public/css/main.css Normal file

File diff suppressed because one or more lines are too long

10
public/elasticlunr.min.js vendored Normal file

File diff suppressed because one or more lines are too long

BIN
public/images/mugshot.jpg Normal file

Binary file not shown.


1
public/js/lang.js Normal file

@ -0,0 +1 @@
function switchLang(n){document.getElementById("switch-lang-panel").classList.toggle("hidden")}document.addEventListener("DOMContentLoaded",function(){document.getElementById("switch-lang")?.addEventListener("click",switchLang)});

1
public/js/main.js Normal file

@ -0,0 +1 @@
function switchTheme(){"dark"==([...document.documentElement.classList].includes("dark")?"dark":"light")?(localStorage.theme="light",document.documentElement.classList.remove("dark"),document.getElementById("light").classList.add("hidden"),document.getElementById("dark").classList.remove("hidden"),document.getElementById("syntax_highlight").href="/syntax-light.css"):(localStorage.theme="dark",document.documentElement.classList.add("dark"),document.getElementById("dark").classList.add("hidden"),document.getElementById("light").classList.remove("hidden"),document.getElementById("syntax_highlight").href="/syntax-dark.css")}function toggleSidebar(){let e=document.getElementById("sidebar");[...e.classList].includes("translate-x-0")?(document.body.style.removeProperty("overflow"),e.classList.remove("translate-x-0"),e.classList.add("-translate-x-full")):(document.body.style.setProperty("overflow","hidden"),e.classList.remove("-translate-x-full"),e.classList.add("translate-x-0"))}function toggleMobileMenu(){let e=document.querySelector("#mobile-menu div.nav-links");[...e.classList].includes("h-screen")?(document.body.classList.remove("overflow-hidden","relative"),document.documentElement.classList.remove("overscroll-none"),e.classList.remove("h-screen"),e.classList.add("h-0")):(document.body.classList.add("overflow-hidden","relative"),document.documentElement.classList.add("overscroll-none"),e.classList.remove("h-0"),e.classList.add("h-screen"))}document.addEventListener("DOMContentLoaded",function(){var e=document.querySelectorAll(".nav-links a");let t=window.location.href.replace(/\/$/,"");e=[...e].filter(e=>e.href===t||e.href===window.location.href);if(0!==e.length)for(var d of e)d.className="bg-gray-900 text-white px-3 py-2 rounded-md text-sm font-medium";"dark"===localStorage.theme||!("theme"in localStorage)&&window.matchMedia("(prefers-color-scheme: dark)").matches?(document.documentElement.classList.add("dark"),document.getElementById("dark").classList.add("hidden"),document.getElementById("syntax_highlight").href="/syntax-dark.css"):(document.documentElement.classList.remove("dark"),document.getElementById("light").classList.add("hidden"),document.getElementById("syntax_highlight").href="/syntax-light.css"),document.getElementById("switch-theme")?.addEventListener("click",switchTheme),document.getElementById("toggle-sidebar")?.addEventListener("click",toggleSidebar),document.getElementById("toggle-mobile-menu")?.addEventListener("click",toggleMobileMenu)});

1
public/js/page.js Normal file

@ -0,0 +1 @@
function getActiveTocElement(e){return[...e].find(e=>e.getBoundingClientRect().y<=0)}function findCorrespondingTocTitle(n){return[...document.querySelectorAll("#toc li a")].find(e=>e.href.substring(e.href.indexOf("#"))==="#"+n.id)}document.addEventListener("DOMContentLoaded",function(){if(null!==document.getElementById("toc")){var e=document.querySelectorAll("#toc li a");let n=[];[...e].forEach(e=>{n.push(e.href.substring(e.href.indexOf("#")))});const i=document.querySelectorAll(n.join(","));let t=[...i].reverse();e=getActiveTocElement(t)||i[0];findCorrespondingTocTitle(e).classList.add("bg-blue-700");var o=e;window.addEventListener("scroll",()=>{var e=getActiveTocElement(t)||i[0];e!==o&&(findCorrespondingTocTitle(o).classList.remove("bg-blue-700"),findCorrespondingTocTitle(e).classList.add("bg-blue-700"),o=e)})}});

6
public/js/search.js Normal file

@ -0,0 +1,6 @@
function toggleSearchModal(){const e=document.getElementById("search-modal");e.classList.toggle("opacity-0"),e.classList.toggle("pointer-events-none"),document.body.classList.toggle("search-active"),[...document.body.classList].includes("search-active")&&(document.getElementById("search-input").value="",document.getElementById("search-input").focus())}function formatResultItem(e){return console.log(e),htmlToElement(`<li class="flex hover:bg-gray-200 dark:hover:bg-gray-600 text-black dark:text-gray-200 p-2 rounded-lg border border-black dark:border-gray-200 bg-gray-200 dark:bg-gray-500 rounded-lg hover:shadow-xl mb-2">
<a href="${e.doc.path}">
<span class="text-xl text-bold">${e.doc.title}</span>
<span class="text-lg">${e.doc.description}</span>
</a>
</li>`)}function htmlToElement(e){let t=document.createElement("template");return e=e.trim(),t.innerHTML=e,t.content.firstChild}document.addEventListener("DOMContentLoaded",function(){let e=document.getElementById("search");e.addEventListener("click",function(e){e.preventDefault(),toggleSearchModal()});const t=document.querySelector(".modal-overlay");t.addEventListener("click",toggleSearchModal);let n=document.querySelectorAll(".modal-close");for(var o=0;o<n.length;o++)n[o].addEventListener("click",toggleSearchModal);document.onkeydown=function(e){let t=!1,n=!1;"key"in(e=e||window.event)?(t="Escape"===e.key||"Esc"===e.key,n="k"===e.key&&!0===e.metaKey):(n=75===e.keyCode&&e.metaKey,t=27===e.keyCode),n&&e.preventDefault(),(t&&document.body.classList.contains("search-active")||n)&&toggleSearchModal()};let l=elasticlunr.Index.load(window.searchIndex),a={bool:"AND",fields:{title:{boost:2},body:{boost:1}}},c,d,r=document.getElementById("search-input");document.getElementById("search-results");r.addEventListener("keyup",function(e){if([...document.body.classList].includes("search-active")&&3<r.value.trim().length&&(c=r.value.trim(),d=l.search(c,a),Array.isArray(d)&&0<d.length)){let e=document.getElementById("results-list");e.replaceChildren();for(o=0;o<d.length;o++){var t=formatResultItem(d[o]);e.appendChild(t)}}})});

File diff suppressed because one or more lines are too long

0
sass/_variables.sass Normal file

BIN
static/images/mugshot.jpg Normal file

Binary file not shown.
