diff --git a/CHANGES.md b/CHANGES.md index 1d78179..2464f93 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,33 @@ +## Version 9.13.0 du 07/02/2022-22:20 + +* `06686d9` pff: support nettoyage wsdl/ +* `9a018a6` pdev: ajout de --check +* `a932a1a` la branche par défaut est master en prod +* `cb09f4d` maj template yaml +* `f895222` foreach: rendre -g et -S compatibles +* `4f7bcbc` maj template CSS +* `5cdd93d` upassword: réintégrer password wom +* `7030b87` dk: ajouter mvn_action=install +* `8a682ed` upassword: changer le générateur aléatoire +* `a85cdf6` Maj des fichiers +* `d75783b` dmcerts: cosmetic +* `11e6021` dmcerts: certificat client avec la même durée que le CA +* `938307c` ajout dmcerts +* `8b49084` typo +* `98c1ac8` ajouter le support de bullseye +* `28d42b0` authftp: support des mots de passe avec des caractères spéciaux +* `dfdfd59` sélectionner java 1.6 par défaut pour worun +* `6af0359` pff: ajout de la commande normalisée new_cmd_amue_clean_libs +* `b3a44d0` pff: bug +* `111463a` pff: ajout AUTO_CMDS et NEW_CMDS, supprimer MKDIRS +* `fe839f4` woinst: bug +* `ba8e1a1` upasword: ajout des mot de passe javamonitor de webobjects +* `a75b91e` network: utiliser 127.0.1.1 pour dhcp +* `dfb5fc3` maj ulib/redhat +* `40c9cb9` supprimer docker/ qui est obsolète +* `580e20b` supprimer nulib de la distribution de nutools +* `da28d8b` pff: changer le format des fichiers de version + ## Version 9.12.0 du 29/03/2021-09:41 * `3d54e37` pff: bug origext diff --git a/VERSION.txt b/VERSION.txt index 2f0dbe6..69c3ec6 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -9.12.0 +9.13.0 diff --git a/authftp b/authftp index cefa98b..ad788b8 100755 --- a/authftp +++ b/authftp @@ -84,6 +84,7 @@ read_value -i "Entrez le chemin" path "$4" N if [ -n "$lftp" ]; then if [ -n "$noproxy" ]; then + export LFTP_PASSWORD="$password" if [ -n "$sftp" ]; then url="sftp://$host/$path" hostkeyfailed_apropos "$login@$host" @@ -92,12 +93,13 @@ if [ -n "$lftp" ]; then fi exec lftp "${options[@]}" -e "\ set ssl:verify-certificate $verify_certificate -open -u $login,$password $url" +open -u $login --env-password $url" else + export LFTP_PASSWORD="${password}@${my_password}" url="ftp://$AUTHFTP_PROXY_HOST/$path" exec lftp "${options[@]}" -e "\ set ssl:verify-certificate $verify_certificate -open -u ${login}@${my_login}@${host},${password}@${my_password} $url" +open -u ${login}@${my_login}@${host} --env-password $url" fi else if [ -n "$noproxy" ]; then diff --git a/dk b/dk index b56d086..c778c40 100755 --- a/dk +++ b/dk @@ -449,7 +449,7 @@ VARIABLES de update-apps.conf ORIGIN vaut 'origin' par défaut BRANCH - vaut 'develop' par défaut + vaut par défaut 'master' dans le profil prod, 'develop' sinon Pour toutes les variables de type BRANCH, utiliser la syntaxe ^COMMIT pour ignorer ORIGIN et sélectionner un commit en particulier TYPE @@ -494,9 +494,11 @@ VARIABLES de update-apps.conf MAVEN_ACTION vaut 'package' par défaut. Indique ce qu'il faut faire pour un projet de type 'maven' après avoir lancé les commandes de BEFORE_BUILD et avant - les commandes de AFTER_UPDATE. Les directives supportées sont 'package' - (alias de 'clean package'), 'package_only' qui ne lance pas de clean - avant le build, et 'none' + les commandes de AFTER_UPDATE. 
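A hedged illustration of how two of the headline changes above might be used. First, the new `install` action can be requested in `update-apps.conf`; the directive values themselves are enumerated in the list that resumes just below, and `-DskipTests` is only an invented example argument:

~~~
# update-apps.conf (sketch, not taken from the repository)
MAVEN_ACTION=install        # new in this release; expands to 'clean package install'
MAVEN_ARGS=-DskipTests
~~~

Second, the authftp hunk above no longer interpolates the password into the lftp command line: it exports it and lets lftp read it back with `open --env-password`, so the password never appears in the process list. A minimal stand-alone sketch of that pattern (login, host and path are placeholders):

~~~
export LFTP_PASSWORD="$password"
lftp -e "open -u alice --env-password sftp://ftp.example.org/upload"
~~~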
Les directives supportées sont: + - 'install' (alias de 'clean package install') + - 'package' (alias de 'clean package') + - 'package_only' qui ne lance pas de clean avant le build + - 'none' qui ne fait rien MAVEN_ARGS options à utiliser avec la commande 'mvn \$MAVEN_ACTION' @@ -845,8 +847,7 @@ function build_update_apps() { DEFAULT_ORIGIN="$UPDATE_APPS_ORIGIN" [ -z "$DEFAULT_ORIGIN" ] && DEFAULT_ORIGIN=origin DEFAULT_BRANCH="$UPDATE_APPS_BRANCH" - #XXX à terme, ne déployer en prod que la branche master - [ -z "$DEFAULT_BRANCH" -a "$PROFILE" == prod ] && DEFAULT_BRANCH=develop #XXX master + [ -z "$DEFAULT_BRANCH" -a "$PROFILE" == prod ] && DEFAULT_BRANCH=master [ -z "$DEFAULT_BRANCH" ] && DEFAULT_BRANCH=develop CLEAN= APPS=() @@ -1143,15 +1144,16 @@ function build_update_apps() { if [ -z "$BUILD_UPDATE_DEVEL" ]; then case "$maven_action" in + install|"clean package install"|cpi|i) maven_action="clean package install";; + package|"clean package"|cp|p) maven_action="clean package";; package_only|po) maven_action="package";; - "clean package"|package|cp|p) maven_action="clean package";; none|nop) maven_action=;; - *) ewarn "$maven_action: action invalide"; maven_action=;; + *) ewarn "$maven_action: action non standard. elle sera utilisée en l'état";; esac if [ -n "$maven_action" ]; then + estep "Compilation du projet maven" setx cwd=pwd cd "$DEST" - estep "Compilation du projet maven" auto_maven "${maven_vars[@]}" $maven_action "${maven_args[@]}" || { eend; return 1; } cd "$cwd" fi diff --git a/dmcerts b/dmcerts new file mode 100755 index 0000000..0b622f6 --- /dev/null +++ b/dmcerts @@ -0,0 +1,132 @@ +#!/bin/bash +# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 +source /etc/ulibauto || exit 1 +urequire install + +function display_help() { + uecho "$scriptname: gérer les certificats de docker-machine + +Les certificats créés par docker-machine ont une durée trop courte à mon goût. +Ce script permet de les recréer avec une durée de 30 ans pour le CA et le +certificat client, et de 10 ans pour chaque machine + +USAGE + $scriptname [host]" +} + +args=( + --help '$exit_with display_help' +) +parse_args "$@"; set -- "${args[@]}" + +dmdir="$HOME/.docker/machine" + +cadir="$dmdir/dmcerts" +cakey="$cadir/dmca.key" +cacsr="$cadir/dmca.csr" +cacrt="$cadir/dmca.crt" +cacrl="$cadir/dmca.crl" +if [ ! -d "$cadir" ]; then + enote "Le répertoire $cadir va être créé. Ne faites cela que si c'est la première fois. Sinon, récupérez le répertoire déjà créé par ailleurs" + ask_yesno "Voulez-vous continuer?" N || die + + mkdir "$cadir" + chmod 700 "$cadir" + mkdir "$cadir"/{openssl,newcerts} + touch "$cadir/index.txt" + echo 01 >"$cadir/serial" + + for src in "$scriptdir"/lib/dmcerts_openssl_*.cnf; do + dest="$cadir/openssl/${src#$scriptdir/lib/dmcerts_openssl_}" + cp "$src" "$dest" + done + sed -i "s/^dir = .*/dir = ${cadir//\//\\\/}/" "$cadir/openssl/"*.cnf + + openssl req -config "$cadir/openssl/ca.cnf" -batch -new -nodes -keyout "$cakey" -out "$cacsr" && + openssl ca -config "$cadir/openssl/ca.cnf" -batch -notext -out "$cacrt" -days 10950 -keyfile "$cakey" -selfsign -extensions v3_ca -infiles "$cacsr" && + openssl ca -config "$cadir/openssl/ca.cnf" -batch -gencrl -out "$cacrl" || + die "Une erreur s'est produite. Veuillez supprimer $cadir et recommencer" +fi + +clientkey="$cadir/dmclient.key" +clientcsr="$cadir/dmclient.csr" +clientcrt="$cadir/dmclient.crt" +clientcnf="$cadir/openssl/client.cnf" +if [ ! 
-f "$clientcrt" ]; then + #sed -i "s/^dir = .*/dir = ${cadir//\//\\\/}/" "$clientcnf" + + subj="/countryName=FR/stateOrProvinceName=La Reunion/localityName=Sainte Clotilde/organizationName=jclain/CN=dmclient/" + openssl req -config "$clientcnf" -batch -new -nodes -subj "$subj" -keyout "$clientkey" -out "$clientcsr" && + openssl ca -config "$clientcnf" -batch -keyfile "$cakey" -notext -out "$clientcrt" -infiles "$clientcsr" && + chmod 644 "$clientcrt" && + chmod 600 "$clientkey" && + rm -f "$clientcsr" || + die "Une erreur s'est produite pendant la génération du certificat client" +fi + +estep "Vérification certificats locaux" +copy_update "$cacrt" "$dmdir/certs/ca.pem" +copy_update "$cakey" "$dmdir/certs/ca-key.pem" +copy_update "$clientcrt" "$dmdir/certs/cert.pem" +copy_update "$clientkey" "$dmdir/certs/key.pem" + +first=1 +for host in "$@"; do + if [ -n "$first" ]; then + servercnf="$cadir/openssl/server.cnf" + #sed -i "s/^dir = .*/dir = ${cadir//\//\\\/}/" "$servercnf" + else + first= + fi + + etitle "$host" + machine="${host%%.*}" + machinedir="$dmdir/machines/$machine" + if [ ! -d "$machinedir" ]; then + eerror "$machine: machine inexistante" + fi + + serverkey="${machine}-server.key" + servercsr="${machine}-server.csr" + servercrt="${machine}-server.crt" + if [ ! -f "$servercrt" ]; then + subj="/countryName=FR/stateOrProvinceName=La Reunion/localityName=Sainte Clotilde/organizationName=jclain/CN=dmserver-$host/" + sans="DNS:$host,DNS:localhost" + sed -i "s/^subjectAltName = .*/subjectAltName = $sans/" "$servercnf" + openssl req -config "$servercnf" -batch -new -nodes -subj "$subj" -keyout "$serverkey" -out "$servercsr" && + openssl ca -config "$servercnf" -batch -keyfile "$cakey" -notext -out "$servercrt" -infiles "$servercsr" && + chmod 644 "$servercrt" && + chmod 600 "$serverkey" && + rm -f "$servercsr" || + die "Une erreur s'est produite pendant la génération du certificat serveur" + fi + + estep "machine: $machine" + ask_yesno "Voulez-vous remplacer les certificats distants et locaux?" 
O || continue + + estep "Copie distante" + scp "$cacrt" "root@$host:/etc/docker/ca.pem" || die + scp "$servercrt" "root@$host:/etc/docker/server.pem" || die + scp "$serverkey" "root@$host:/etc/docker/server-key.pem" || die + + estep "Redémarrage docker distant" + ssh "root@$host" "service docker restart" || die + + estep "Copie locale" + copy_update "$cacrt" "$machinedir/ca.pem" + copy_update "$cakey" "$machinedir/ca-key.pem" + copy_update "$clientcrt" "$machinedir/cert.pem" + copy_update "$clientkey" "$machinedir/key.pem" + copy_update "$servercrt" "$machinedir/server.pem" + copy_update "$serverkey" "$machinedir/server-key.pem" + + estep "Test de l'accès au serveur" + setx env=docker-machine env "$machine" || die + eval "$env" + echo "$machine docker version: $(docker system info -f '{{.ServerVersion}}')" + + estep "Suppression des fichiers temporaires" + rm -f "$servercrt" "$serverkey" +done + +enote "N'oubliez pas de re-exporter les docker-machines qui ont été regénérées" diff --git a/docker/.dockerignore b/docker/.dockerignore deleted file mode 100644 index dcf0102..0000000 --- a/docker/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -/b/t/ diff --git a/docker/.gitignore b/docker/.gitignore deleted file mode 100644 index 0797d89..0000000 --- a/docker/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/b/ -/build.env.local diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100644 index 27bb578..0000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM ur/d9base - -ARG date -ARG version -LABEL date=$date version=$version - -ENV I=/install/nutools -COPY . $I -RUN $I/uinst -y $I && rm -rf $I - -ENV PATH="$PATH:/usr/local/nutools" -CMD ["/bin/bash", "-il"] diff --git a/docker/build b/docker/build deleted file mode 100755 index 65989cf..0000000 --- a/docker/build +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -source /etc/ulibauto || exit 1 - -function display_help() { - uecho "$scriptname: construire $NAME - -USAGE - $scriptname [options] - -OPTIONS - -b, --build - -n, --no-cache" -} - -function do_prereqs() { - : -} -function do_build() { - local date; local -a args - setx date=date +%y%m%d - args=( - -f Dockerfile - -t $NAME:latest - --build-arg "date=$date" - ${no_cache:+--no-cache} - ) - - etitle "Création de l'image docker $NAME" \ - docker build "${args[@]}" "$CTXDIR" -} -function do_run() { - docker run -it --rm "$NAME" -} - -cd "$scriptdir" -source ./build.env || die "Impossible de trouver build.env" -[ -f build.env.local ] && source build.env.local - -auto=1 -prereqs= -build= -no_cache= -run= -args=( - --help '$exit_with display_help' - --prereqs '$prereqs=1; auto=' - -b,--build '$build=1; auto=' - -n,--no-cache no_cache=1 - -r,--run '$run=1; auto=' -) -parse_args "$@"; set -- "${args[@]}" - -if [ -n "$prereqs" ]; then - build= - run= -elif [ -n "$auto" ]; then - build=1 -fi - -if [ -n "$prereqs" ]; then - do_prereqs "$@" || die -fi - -if [ -n "$build" ]; then - do_build "$@" || die -fi - -if [ -n "$run" ]; then - do_run "$@" || die -fi - diff --git a/docker/build.env b/docker/build.env deleted file mode 100644 index 4b53837..0000000 --- a/docker/build.env +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -# Nom de l'image -NAME=ur/nutools -# Build context pour docker -CTXDIR=.. -# Répertoire contenant les fichiers nécessaire au build -BUILDDIR=b -# Répertoire temporaire pour construire les fichiers de BUILDDIR. 
Ce répertoire -# n'est pas transmis au daemon docker -TRANSDIR=b/t - -# Fonctions spécifiques -#function do_prereqs() { :; } -#function do_build() { :; } -#function do_run() { :; } diff --git a/foreach b/foreach index 445c008..6f4fc2c 100755 --- a/foreach +++ b/foreach @@ -83,7 +83,7 @@ OPTIONS répertoire parent (valide uniquement avec l'option -p) -S, --git-status Equivalent à spécifier la commande 'git status --p' - Utiliser avec -G ou -C + Utiliser par exemple avec -g, -G ou -C -G, --git-projects Equivalent à '--ptitle -p */.git --' e.g '$scriptname -G git pull' pour mettre à jour les dépôts situés dans un répertoire @@ -124,7 +124,11 @@ parse_args "$@"; set -- "${args[@]}" case "$command" in git-status-p) - set -- git status --p "$@" + if [ -n "$shortcut" ]; then + set -- git status --p "$@" + else + set -- "$@" -- git status --p + fi ;; esac case "$shortcut" in diff --git a/lib/dmcerts_openssl_ca.cnf b/lib/dmcerts_openssl_ca.cnf new file mode 100644 index 0000000..9275819 --- /dev/null +++ b/lib/dmcerts_openssl_ca.cnf @@ -0,0 +1,106 @@ +# -*- coding: utf-8 mode: conf -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 + +[ca] +default_ca = dmca + +[dmca] +dir = @@cadir@@ +certs = $dir/certs +crl_dir = $dir/crl +database = $dir/index.txt +unique_subject = no +new_certs_dir = $dir/newcerts + +certificate = $dir/dmca.crt +serial = $dir/serial +crl = $dir/dmca.crl +private_key = $dir/dmca.key +RANDFILE = $dir/private/.rand + +x509_extensions = usr_cert + +name_opt = ca_default +cert_opt = ca_default + +default_days = 10950 +default_crl_days = 30 +default_md = sha256 +preserve = no + +policy = policy_match + +[policy_match] +countryName = match +stateOrProvinceName = match +organizationName = match +organizationalUnitName = optional +commonName = supplied +emailAddress = optional + +[req] +default_bits = 2048 +default_md = sha256 +default_keyfile = privkey.pem +distinguished_name = req_distinguished_name +attributes = req_attributes +x509_extensions = v3_ca +string_mask = utf8only + +[req_distinguished_name] +countryName = Country Name (2 letter code) +countryName_default = FR +countryName_min = 2 +countryName_max = 2 + +stateOrProvinceName = State or Province Name (full name) +stateOrProvinceName_default = La Reunion + +localityName = Locality Name (eg, city) +localityName_default = Sainte-Clotilde + +0.organizationName = Organization Name (eg, company) +0.organizationName_default = jclain + +organizationalUnitName = Organizational Unit Name (eg, section) +organizationalUnitName_default = + +commonName = Common Name (eg, your name or your server\'s hostname) +commonName_default = dmca +commonName_max = 64 + +emailAddress = Email Address +emailAddress_default = jephte.clain@univ-reunion.fr +emailAddress_max = 64 + +[req_attributes] +challengePassword = A challenge password +challengePassword_min = 4 +challengePassword_max = 20 + +unstructuredName = dmca + +[usr_cert] + +basicConstraints=CA:FALSE + +nsComment = "dmca certificate" + +subjectKeyIdentifier=hash +authorityKeyIdentifier=keyid,issuer:always + +[v3_req] + +basicConstraints = CA:FALSE +keyUsage = nonRepudiation,digitalSignature,keyEncipherment + +[v3_ca] + +#subjectKeyIdentifier=hash +#authorityKeyIdentifier=keyid:always,issuer +basicConstraints = critical,CA:true +keyUsage = critical,keyCertSign,digitalSignature,keyEncipherment,keyAgreement + +[crl_ext] + +#issuerAltName=issuer:copy +authorityKeyIdentifier=keyid:always diff --git a/lib/dmcerts_openssl_client.cnf b/lib/dmcerts_openssl_client.cnf new file mode 100644 index 0000000..8fd6c69 
--- /dev/null +++ b/lib/dmcerts_openssl_client.cnf @@ -0,0 +1,99 @@ +# -*- coding: utf-8 mode: conf -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 + +[ca] +default_ca = dmca + +[dmca] +dir = @@cadir@@ +certs = $dir/certs +crl_dir = $dir/crl +database = $dir/index.txt +unique_subject = no +new_certs_dir = $dir/newcerts + +certificate = $dir/dmca.crt +serial = $dir/serial +crl = $dir/dmca.crl +private_key = $dir/dmca.key +RANDFILE = $dir/private/.rand + +x509_extensions = usr_ext + +name_opt = ca_default +cert_opt = ca_default + +copy_extensions = copy + +default_days = 10950 +default_crl_days = 30 +default_md = sha256 +preserve = no + +policy = policy_match + +[policy_match] +#countryName = match +#stateOrProvinceName = match +#organizationName = match +countryName = supplied +stateOrProvinceName = supplied +organizationName = supplied +organizationalUnitName = optional +commonName = supplied +emailAddress = optional + +[req] +default_bits = 2048 +default_md = sha256 +default_keyfile = privkey.pem +distinguished_name = req_distinguished_name +attributes = req_attributes +x509_extensions = usr_ext +string_mask = utf8only + +#input_password = secret +#output_password = secret + +[req_distinguished_name] +countryName = Country Name (2 letter code) +countryName_default = FR +countryName_min = 2 +countryName_max = 2 + +stateOrProvinceName = State or Province Name (full name) +stateOrProvinceName_default = La Reunion + +localityName = Locality Name (eg, city) +localityName_default = Sainte-Clotilde + +0.organizationName = Organization Name (eg, company) +0.organizationName_default = jclain + +organizationalUnitName = Organizational Unit Name (eg, section) +organizationalUnitName_default = + +commonName = Common Name (eg, your name or your server\'s hostname) +commonName_max = 64 + +emailAddress = Email Address +emailAddress_max = 64 + +[req_attributes] +challengePassword = A challenge password +challengePassword_min = 4 +challengePassword_max = 20 + +unstructuredName = jclain + +[usr_ext] + +basicConstraints=critical,CA:FALSE +#subjectKeyIdentifier=hash +#authorityKeyIdentifier=keyid,issuer:always +keyUsage = critical,digitalSignature +extendedKeyUsage = clientAuth + +[crl_ext] + +#issuerAltName=issuer:copy +authorityKeyIdentifier=keyid:always diff --git a/lib/dmcerts_openssl_server.cnf b/lib/dmcerts_openssl_server.cnf new file mode 100644 index 0000000..ddcb5cc --- /dev/null +++ b/lib/dmcerts_openssl_server.cnf @@ -0,0 +1,101 @@ +# -*- coding: utf-8 mode: conf -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 + +[ca] +default_ca = dmca + +[dmca] +dir = @@cadir@@ +certs = $dir/certs +crl_dir = $dir/crl +database = $dir/index.txt +unique_subject = no +new_certs_dir = $dir/newcerts + +certificate = $dir/dmca.crt +serial = $dir/serial +crl = $dir/dmca.crl +private_key = $dir/dmca.key +RANDFILE = $dir/private/.rand + +x509_extensions = usr_ext + +name_opt = ca_default +cert_opt = ca_default + +copy_extensions = copy + +default_days = 3650 +default_crl_days = 30 +default_md = sha256 +preserve = no + +policy = policy_match + +[policy_match] +#countryName = match +#stateOrProvinceName = match +#organizationName = match +countryName = supplied +stateOrProvinceName = supplied +organizationName = supplied +organizationalUnitName = optional +commonName = supplied +emailAddress = optional + +[req] +default_bits = 2048 +default_md = sha256 +default_keyfile = privkey.pem +distinguished_name = req_distinguished_name +attributes = req_attributes +x509_extensions = usr_ext +string_mask = utf8only + +#input_password = secret 
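The dmcerts script above generates a CA, a client certificate and per-machine server certificates from these three templates (the `dir = @@cadir@@` line is rewritten to the real path by the script's `sed` call). A minimal sketch of how the resulting files could be inspected by hand, assuming the default locations used by dmcerts; these are plain openssl commands, not part of the script, and `mymachine` is a placeholder:

~~~
cadir=~/.docker/machine/dmcerts
machinedir=~/.docker/machine/machines/mymachine

# CA and client certificates are issued for 30 years (10950 days),
# server certificates for 10 years (3650 days)
openssl x509 -noout -dates -in "$cadir/dmca.crt"
openssl x509 -noout -dates -in "$cadir/dmclient.crt"
openssl x509 -noout -dates -in "$machinedir/server.pem"

# a server certificate must chain to the dmcerts CA and carry the host name
# passed to dmcerts in its subjectAltName (rewritten by the script before signing)
openssl verify -CAfile "$cadir/dmca.crt" "$machinedir/server.pem"
openssl x509 -noout -text -in "$machinedir/server.pem" | grep -A1 'Subject Alternative Name'
~~~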
+#output_password = secret + +[req_distinguished_name] +countryName = Country Name (2 letter code) +countryName_default = FR +countryName_min = 2 +countryName_max = 2 + +stateOrProvinceName = State or Province Name (full name) +stateOrProvinceName_default = La Reunion + +localityName = Locality Name (eg, city) +localityName_default = Sainte-Clotilde + +0.organizationName = Organization Name (eg, company) +0.organizationName_default = jclain + +organizationalUnitName = Organizational Unit Name (eg, section) +organizationalUnitName_default = + +commonName = Common Name (eg, your name or your server\'s hostname) +commonName_max = 64 + +emailAddress = Email Address +emailAddress_max = 64 + +[req_attributes] +challengePassword = A challenge password +challengePassword_min = 4 +challengePassword_max = 20 + +unstructuredName = jclain + +[usr_ext] + +basicConstraints = critical,CA:FALSE +#subjectKeyIdentifier=hash +#authorityKeyIdentifier=keyid,issuer:always +keyUsage = critical,digitalSignature,keyEncipherment,keyAgreement +extendedKeyUsage = serverAuth + +subjectAltName = DNS:localhost + +[crl_ext] + +#issuerAltName=issuer:copy +authorityKeyIdentifier=keyid:always diff --git a/lib/nulib/.gitignore b/lib/nulib/.gitignore deleted file mode 100644 index e69de29..0000000 diff --git a/lib/nulib/.project b/lib/nulib/.project deleted file mode 100644 index 9b2fc83..0000000 --- a/lib/nulib/.project +++ /dev/null @@ -1,17 +0,0 @@ - - - nulib - - - - - - org.python.pydev.PyDevBuilder - - - - - - org.python.pydev.pythonNature - - diff --git a/lib/nulib/.pydevproject b/lib/nulib/.pydevproject deleted file mode 100644 index 07dfe2b..0000000 --- a/lib/nulib/.pydevproject +++ /dev/null @@ -1,8 +0,0 @@ - - -python interpreter -Default - -/${PROJECT_DIR_NAME}/python - - diff --git a/lib/nulib/.settings/org.eclipse.core.resources.prefs b/lib/nulib/.settings/org.eclipse.core.resources.prefs deleted file mode 100644 index 90e82f1..0000000 --- a/lib/nulib/.settings/org.eclipse.core.resources.prefs +++ /dev/null @@ -1,5 +0,0 @@ -eclipse.preferences.version=1 -encoding//python/nulib/web/bootstrap.py=utf-8 -encoding//python/nulib/web/model.py=utf-8 -encoding//python/nulib/web/ui.py=utf-8 -encoding/=UTF-8 diff --git a/lib/nulib/.settings/org.eclipse.core.runtime.prefs b/lib/nulib/.settings/org.eclipse.core.runtime.prefs deleted file mode 100644 index 5a0ad22..0000000 --- a/lib/nulib/.settings/org.eclipse.core.runtime.prefs +++ /dev/null @@ -1,2 +0,0 @@ -eclipse.preferences.version=1 -line.separator=\n diff --git a/lib/nulib/MANIFEST.in b/lib/nulib/MANIFEST.in deleted file mode 100644 index 73dd970..0000000 --- a/lib/nulib/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -global-include * -global-exclude *.pyc -exclude MANIFEST -prune dist diff --git a/lib/nulib/TODO.md b/lib/nulib/TODO.md deleted file mode 100644 index 458a07e..0000000 --- a/lib/nulib/TODO.md +++ /dev/null @@ -1,180 +0,0 @@ -# TODO - -## Améliorer rtoinst - -ajouter le support de pffprofile pour deploydb - -## Refaire/repenser les fonctions evalX - -dans chaque exemple, on affiche l'invocation de evalX suivi de l'équivalent en -syntaxe standard - -- evala permet de traiter des tableaux - ~~~ - evala with array // add a b c - array=("${array[@]}" a b c) - ~~~ - les fonctions à utiliser pour le traitement sont configurées avec des - variables spéciales. 
par exemple, on peut indiquer que la commande add - ci-dessus est en réalité gérée par la fonction array_add et que c'est une - commande de type modify qui prend en premier argument le nom du tableau: - ~~~ - __evala_add_func=array_add - __evala_add_type=m - __evala_add_arg=first - ~~~ - en fonction du type de fonction, les arguments supplémentaires supportés sont - différents. par défaut, la fonction à utiliser est du même nom que la - commande, est du type scalar, et prend comme argument @ - Ainsi les deux commandes suivantes sont équivalentes: - ~~~ - evala with array // echo - echo "${array[@]}" - ~~~ - et assument les définitions suivantes: - ~~~ - __evala_echo_func=echo - __evala_echo_type=s - __evala_echo_arg=@ - ~~~ - -- evalx permet d'utiliser toutes ces fonctions ensemble - ~~~ - evalx seq 5 //p grep -v 3 //a prepend prefix // append suffix //c echo - array=($(seq 5 | grep -v 3)); array=(prefix "${array[@]}"); array=("${array[@]}" suffix); echo "${array[@]}" - - # à partir du mode evala, on peut exécuter directement les arguments du - # tableau comme une commande en terminant par //c - evalx -a with array // prepend echo //c - array=(echo "${array[@]}"); "${array[@]}" - ~~~ - evalx commence par défaut en mode evalc. il est possible avec les options -i, - -s, -a, -c, -p, -m de forcer respectivement evali, evals, evala, evalc, evalp, - evalm - -- Il faudra réfléchir à comment sortir du mode evalm pour utilisation avec - evalx. ou alors on part du principe que evalm est toujours en fin de chaine. - -## Faire la fonction cmdx - -cmdx permet de lancer une commande avec les arguments qui sont générés par -evalx. cmdx commence par défaut en mode evalm. Par exemple, les deux commandes -suivantes sont équivalentes: -~~~ -cmdx etitle //"Copie de " basename "$src" //" vers " ppath "$dest" -etitle "Copie de $(basename "$src") vers $(ppath "$dest")" -~~~ - -Comme pour evalx, les options -i, -s, -a, -c, -p, -m permettent de forcer -respectivement les modes evali, evals, evala, evalc, evalp, evalm. Par exemple -les deux commandes suivantes sont équivalentes: -~~~ -cmdx -c echo a // b -echo "$(b "$(a)")" -~~~ - -## Faire la fonction checkx - -checkx permet de tester le résultat d'une commande evalx. elle s'utilise de -cette manière: -~~~ -checkx cmds... OP VALUE -~~~ - -Les opérateurs sont de la forme: -~~~ -is -n|notempty -is -z|empty -is ok -is ko -== value -!= value -etc. -~~~ - -checkx remplace testx avec une syntaxe plus naturelle. si aucun script -n'utilise les fonctions testx, peut-être peut-on simplement supprimer les -fonctions testx et renommer checkx en testx - -Comme pour evalx, les options -i, -s, -a, -c, -p, -m permettent de forcer -respectivement les modes evali, evals, evala, evalc, evalp, evalm. Par exemple -les deux commandes suivantes sont équivalentes: -~~~ -checkx -p a // b == c -[ "$(evalp a // b)" == c ] -~~~ - -Les commande suivantes sont équivalentes deux à deux: -~~~ -checkx cmd is -z -[ -z "$(evalx cmd)" ] - -checkx cmd is ok -evalx cmd; [ $? -eq 0 ] - -checkx cmd is ko -evalx cmd; [ $? -ne 0 ] - -checkx cmd == value -[ "$(evalx cmd)" == value ] -~~~ - -## Faire la fonction storex - -storex permet de mettre le résultat d'une fonction evalx dans une variable ou de -l'ajouter à un tableau. l'idée est d'avoir la même syntaxe que checkx. je ne -suis pas encore tout à fait sûr que ce soit une bonne chose. 
- -Les commande suivantes sont équivalentes deux à deux: -~~~ -storex cmd to var -var="$(evalx cmd)" - -storex cmd to var -setx var=cmd - -storex -a cmd to array -array_add array "$(evalx cmd)" - -storex -r cmd from array -array_del array "$(evalx cmd)" -~~~ - -syntaxes alternatives -~~~ -storex cmd to var -addx cmd to array -removex cmd from array -~~~ - -alternatives -~~~ -setx var=cmd -evalx cmd // array_add array -evalx cmd // array_del array -~~~ - -note: il ne semble pas nécessaire au vu de l'alternative d'implémenter storex, -addx, removex. - -par contre, il faut corriger un bug d'evalc: la dernière commande doit être -exécutée telle quelle. en effet, -~~~ -evalc a // b -~~~ -devrait être équivalent à -~~~ -b "$(a)" -~~~ -mais en fait c'est plutôt -~~~ -echo "$(b "$(a)")" -~~~ -et ça pose problème, notamment si b initialise des variables, etc. - -## Fonctions diverses - -`retcode cmd` -: affiche le code de retour de cmd. équivalent à `cmd; echo $?` - --*- coding: utf-8 mode: markdown -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8:noeol:binary \ No newline at end of file diff --git a/lib/nulib/awk/base b/lib/nulib/awk/base deleted file mode 100644 index b62f5c3..0000000 --- a/lib/nulib/awk/base +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -@include "base.core" -@include "base.array" -@include "base.date" diff --git a/lib/nulib/awk/base.array b/lib/nulib/awk/base.array deleted file mode 100644 index bd5ac32..0000000 --- a/lib/nulib/awk/base.array +++ /dev/null @@ -1,157 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -function mkindices(values, indices, i, j) { - array_new(indices) - j = 1 - for (i in values) { - indices[j++] = int(i) - } - return asort(indices) -} -function array_new(dest) { - dest[0] = 0 # forcer awk à considérer dest comme un tableau - delete dest -} -function array_newsize(dest, size, i) { - dest[0] = 0 # forcer awk à considérer dest comme un tableau - delete dest - size = int(size) - for (i = 1; i <= size; i++) { - dest[i] = "" - } -} -function array_len(values, count, i) { - # length(array) a un bug sur awk 3.1.5 - # cette version est plus lente mais fonctionne toujours - count = 0 - for (i in values) { - count++ - } - return count -} -function array_copy(dest, src, count, indices, i) { - array_new(dest) - count = mkindices(src, indices) - for (i = 1; i <= count; i++) { - dest[indices[i]] = src[indices[i]] - } -} -function array_getlastindex(src, count, indices) { - count = mkindices(src, indices) - if (count == 0) return 0 - return indices[count] -} -function array_add(dest, value, lastindex) { - lastindex = array_getlastindex(dest) - dest[lastindex + 1] = value -} -function array_deli(dest, i, l) { - i = int(i) - if (i == 0) return - l = array_len(dest) - while (i < l) { - dest[i] = dest[i + 1] - i++ - } - delete dest[l] -} -function array_del(dest, value, ignoreCase, i) { - do { - i = key_index(value, dest, ignoreCase) - if (i != 0) array_deli(dest, i) - } while (i != 0) -} -function array_extend(dest, src, count, lastindex, indices, i) { - lastindex = array_getlastindex(dest) - count = mkindices(src, indices) - for (i = 1; i <= count; i++) { - dest[lastindex + i] = src[indices[i]] - } -} -function array_fill(dest, i) { - array_new(dest) - for (i = 1; i <= NF; i++) { - dest[i] = $i - } -} -function array_getline(src, count, indices, i, j) { - $0 = "" - count = mkindices(src, indices) - for (i = 1; i <= count; i++) { - j = indices[i] - $j = src[j] - } -} -function 
array_appendline(src, count, indices, i, nf, j) { - count = mkindices(src, indices) - nf = NF - for (i = 1; i <= count; i++) { - j = nf + indices[i] - $j = src[indices[i]] - } -} -function in_array(value, values, ignoreCase, i) { - if (ignoreCase) { - value = tolower(value) - for (i in values) { - if (tolower(values[i]) == value) return 1 - } - } else { - for (i in values) { - if (values[i] == value) return 1 - } - } - return 0 -} -function key_index(value, values, ignoreCase, i) { - if (ignoreCase) { - value = tolower(value) - for (i in values) { - if (tolower(values[i]) == value) return int(i) - } - } else { - for (i in values) { - if (values[i] == value) return int(i) - } - } - return 0 -} -function array2s(values, prefix, sep, suffix, noindices, first, i, s) { - if (!prefix) prefix = "[" - if (!sep) sep = ", " - if (!suffix) suffix = "]" - s = prefix - first = 1 - for (i in values) { - if (first) first = 0 - else s = s sep - if (!noindices) s = s "[" i "]=" - s = s values[i] - } - s = s suffix - return s -} -function array2so(values, prefix, sep, suffix, noindices, count, indices, i, s) { - if (!prefix) prefix = "[" - if (!sep) sep = ", " - if (!suffix) suffix = "]" - s = prefix - count = mkindices(values, indices) - for (i = 1; i <= count; i++) { - if (i > 1) s = s sep - if (!noindices) s = s "[" indices[i] "]=" - s = s values[indices[i]] - } - s = s suffix - return s -} -function array_join(values, sep, prefix, suffix, count, indices, i, s) { - s = prefix - count = mkindices(values, indices) - for (i = 1; i <= count; i++) { - if (i > 1) s = s sep - s = s values[indices[i]] - } - s = s suffix - return s -} diff --git a/lib/nulib/awk/base.core b/lib/nulib/awk/base.core deleted file mode 100644 index 49a4b58..0000000 --- a/lib/nulib/awk/base.core +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -function num(s) { - if (s ~ /^[0-9]+$/) return int(s) - else return s -} -function ord(s, i) { - s = substr(s, 1, 1) - i = index(" !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~", s) - if (i != 0) i += 32 - 1 - return i -} -function hex(i, s) { - s = sprintf("%x", i) - if (length(s) < 2) s = "0" s - return s -} -function qhtml(s) { - gsub(/&/, "\\&", s) - gsub(/"/, "\\"", s) - gsub(/>/, "\\>", s) - gsub(/", s) - gsub(/"/, "\"", s) - gsub(/&/, "\\&", s) - return s -} -function qawk(s) { - gsub(/\\/, "\\\\", s) - gsub(/"/, "\\\"", s) - gsub(/\n/, "\\n", s) - return "\"" s "\"" -} -function qval(s) { - gsub(/'/, "'\\''", s) - return "'" s "'" -} -function sqval(s) { - return " " qval(s) -} -function qvals( i, line) { - line = "" - for (i = 1; i <= NF; i++) { - if (i > 1) line = line " " - line = line qval($i) - } - return line -} -function sqvals() { - return " " qvals() -} -function qarr(values, prefix, i, count, line) { - line = prefix - count = array_len(values) - for (i = 1; i <= count; i++) { - if (i > 1 || line != "") line = line " " - line = line qval(values[i]) - } - return line -} -function qregexp(s) { - gsub(/[[\\.^$*+?()|{]/, "\\\\&", s) - return s -} -function qsubrepl(s) { - gsub(/\\/, "\\\\", s) - gsub(/&/, "\\\\&", s) - return s -} -function qgrep(s) { - gsub(/[[\\.^$*]/, "\\\\&", s) - return s -} -function qegrep(s) { - gsub(/[[\\.^$*+?()|{]/, "\\\\&", s) - return s -} -function qsql(s, suffix) { - gsub(/'/, "''", s) - return "'" s "'" (suffix != ""? 
" " suffix: "") -} -function cqsql(s, suffix) { - return "," qsql(s, suffix) -} -function unquote_mysqlcsv(s) { - gsub(/\\n/, "\n", s) - gsub(/\\t/, "\t", s) - gsub(/\\0/, "\0", s) - gsub(/\\\\/, "\\", s) - return s -} -function sval(s) { - if (s == "") return s - else return " " s -} -function cval(s, suffix) { - suffix = suffix != ""? " " suffix: "" - if (s == "") return s - else return "," s suffix -} - -function printto(s, output) { - if (output == "") { - print s - } else if (output ~ /^>>/) { - sub(/^>>/, "", output) - print s >>output - } else if (output ~ /^>/) { - sub(/^>/, "", output) - print s >output - } else if (output ~ /^\|&/) { - sub(/^\|&/, "", output) - print s |&output - } else if (output ~ /^\|/) { - sub(/^\|/, "", output) - print s |output - } else { - print s >output - } -} -function find_line(input, field, value, orig, line) { - orig = $0 - line = "" - while ((getline 0) { - if ($field == value) { - line = $0 - break - } - } - close(input) - $0 = orig - return line -} -function merge_line(input, field, key, line) { - line = find_line(input, field, $key) - if (line != "") $0 = $0 FS line -} diff --git a/lib/nulib/awk/base.date b/lib/nulib/awk/base.date deleted file mode 100644 index 48e3eff..0000000 --- a/lib/nulib/awk/base.date +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -function date__parse_fr(date, parts, y, m, d) { - if (match(date, /([0-9][0-9]?)\/([0-9][0-9]?)\/([0-9][0-9][0-9][0-9])/, parts)) { - y = int(parts[3]) - m = int(parts[2]) - d = int(parts[1]) - return mktime(sprintf("%04i %02i %02i 00 00 00 +0400", y, m, d)) - } else if (match(date, /([0-9][0-9]?)\/([0-9][0-9]?)\/([0-9][0-9])/, parts)) { - basey = int(strftime("%Y")); basey = basey - basey % 100 - y = basey + int(parts[3]) - m = int(parts[2]) - d = int(parts[1]) - return mktime(sprintf("%04i %02i %02i 00 00 00 +0400", y, m, d)) - } - return -1 -} -function date__parse_mysql(date, parts, y, m, d) { - if (match(date, /([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])/, parts)) { - y = int(parts[1]) - m = int(parts[2]) - d = int(parts[3]) - return mktime(sprintf("%04i %02i %02i 00 00 00 +0400", y, m, d)) - } - return -1 -} -function date__parse_any(date, serial) { - serial = date__parse_fr(date) - if (serial == -1) serial = date__parse_mysql(date) - return serial -} -function date_serial(date) { - return date__parse_any(date) -} -function date_parse(date, serial) { - serial = date__parse_any(date) - if (serial == -1) return date - return strftime("%d/%m/%Y", serial) -} -function date_monday(date, serial, dow) { - serial = date__parse_any(date) - if (serial == -1) return date - dow = strftime("%u", serial) - serial -= (dow - 1) * 86400 - return strftime("%d/%m/%Y", serial) -} -function date_add(date, nbdays, serial) { - serial = date__parse_any(date) - if (serial == -1) return date - serial += nbdays * 86400 - return strftime("%d/%m/%Y", serial) -} diff --git a/lib/nulib/awk/csv b/lib/nulib/awk/csv deleted file mode 100644 index 7f16be4..0000000 --- a/lib/nulib/awk/csv +++ /dev/null @@ -1,201 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -@include "base.core" -@include "base.array" - -function csv__parse_quoted(line, destl, colsep, qchar, echar, pos, tmpl, nextc, resl) { - line = substr(line, 2) - resl = "" - while (1) { - pos = index(line, qchar) - if (pos == 0) { - # chaine mal terminee - resl = resl line - destl[0] = "" - destl[1] = 0 - return resl - } - if (echar != "" && pos > 1) { - # tenir compte du 
fait qu"un caratère peut être mis en échappement - prevc = substr(line, pos - 1, 1) - quotec = substr(line, pos, 1) - nextc = substr(line, pos + 1, 1) - if (prevc == echar) { - # qchar en échappement - tmpl = substr(line, 1, pos - 2) - resl = resl tmpl quotec - line = substr(line, pos + 1) - continue - } - tmpl = substr(line, 1, pos - 1) - if (nextc == colsep || nextc == "") { - # fin de champ ou fin de ligne - resl = resl tmpl - destl[0] = substr(line, pos + 2) - destl[1] = nextc == colsep - return resl - } else { - # erreur de syntaxe: guillemet non mis en échappement - # ignorer cette erreur et prendre le guillemet quand meme - resl = resl tmpl quotec - line = substr(line, pos + 1) - } - } else { - # pas d"échappement pour qchar. il est éventuellement doublé - tmpl = substr(line, 1, pos - 1) - quotec = substr(line, pos, 1) - nextc = substr(line, pos + 1, 1) - if (nextc == colsep || nextc == "") { - # fin de champ ou fin de ligne - resl = resl tmpl - destl[0] = substr(line, pos + 2) - destl[1] = nextc == colsep - return resl - } else if (nextc == qchar) { - # qchar en echappement - resl = resl tmpl quotec - line = substr(line, pos + 2) - } else { - # erreur de syntaxe: guillemet non mis en échappement - # ignorer cette erreur et prendre le guillemet quand meme - resl = resl tmpl quotec - line = substr(line, pos + 1) - } - } - } -} -function csv__parse_unquoted(line, destl, colsep, qchar, echar, pos) { - pos = index(line, colsep) - if (pos == 0) { - destl[0] = "" - destl[1] = 0 - return line - } else { - destl[0] = substr(line, pos + 1) - destl[1] = 1 - return substr(line, 1, pos - 1) - } -} -function csv__array_parse(fields, line, nbfields, colsep, qchar, echar, shouldparse, destl, i) { - array_new(fields) - array_new(destl) - i = 1 - shouldparse = 0 - # shouldparse permet de gérer le cas où un champ vide est en fin de ligne. 
- # en effet, après "," il faut toujours parser, même si line=="" - while (shouldparse || line != "") { - if (index(line, qchar) == 1) { - value = csv__parse_quoted(line, destl, colsep, qchar, echar) - line = destl[0] - shouldparse = destl[1] - } else { - value = csv__parse_unquoted(line, destl, colsep, qchar, echar) - line = destl[0] - shouldparse = destl[1] - } - fields[i] = value - i = i + 1 - } - if (nbfields) { - nbfields = int(nbfields) - i = array_len(fields) - while (i < nbfields) { - i++ - fields[i] = "" - } - } - return array_len(fields) -} -BEGIN { - DEFAULT_COLSEP = "," - DEFAULT_QCHAR = "\"" - DEFAULT_ECHAR = "" -} -function array_parsecsv2(fields, line, nbfields, colsep, qchar, echar) { - return csv__array_parse(fields, line, nbfields, colsep, qchar, echar) -} -function array_parsecsv(fields, line, nbfields, colsep, qchar, echar) { - if (colsep == "") colsep = DEFAULT_COLSEP - if (qchar == "") qchar = DEFAULT_QCHAR - if (echar == "") echar = DEFAULT_ECHAR - return csv__array_parse(fields, line, nbfields, colsep, qchar, echar) -} -function parsecsv(line, fields) { - array_parsecsv(fields, line) - array_getline(fields) - return NF -} -function getlinecsv(file, fields) { - if (file) { - getline 1) line = line colsep - if (qchar != "" && index(value, qchar) != 0) { - if (echar != "") gsub(qchar, quote_subrepl(echar) "&", value); - else gsub(qchar, "&&", value); - } - if (qchar != "" && (index(value, mvsep) != 0 || index(value, colsep) != 0 || index(value, qchar) != 0 || csv__should_quote(value))) { - line = line qchar value qchar - } else { - line = line value - } - } - return line -} -function array_formatcsv(fields) { - return array_formatcsv2(fields, ",", ";", "\"", "") -} -function array_printcsv(fields, output) { - printto(array_formatcsv(fields), output) -} -function get_formatcsv( fields) { - array_fill(fields) - return array_formatcsv(fields) -} -function formatcsv() { - $0 = get_formatcsv() -} -function printcsv(output, fields) { - array_fill(fields) - array_printcsv(fields, output) -} -function array_findcsv(fields, input, field, value, nbfields, orig, found, i) { - array_new(orig) - array_fill(orig) - array_new(fields) - found = 0 - while ((getline 0) { - array_parsecsv(fields, $0, nbfields) - if (fields[field] == value) { - found = 1 - break - } - } - close(input) - array_getline(orig) - if (!found) { - delete fields - if (nbfields) { - nbfields = int(nbfields) - i = array_len(fields) - while (i < nbfields) { - i++ - fields[i] = "" - } - } - } - return found -} diff --git a/lib/nulib/awk/enc.base64 b/lib/nulib/awk/enc.base64 deleted file mode 100644 index b782fcf..0000000 --- a/lib/nulib/awk/enc.base64 +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 mode: awk -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -function base64__and(var, x, l_res, l_i) { - l_res = 0; - for (l_i = 0; l_i < 8; l_i++){ - if (var%2 == 1 && x%2 == 1) l_res = l_res/2 + 128; - else l_res /= 2; - var = int(var/2); - x = int(x/2); - } - return l_res; -} -# Rotate bytevalue left x times -function base64__lshift(var, x) { - while(x > 0){ - var *= 2; - x--; - } - return var; -} -# Rotate bytevalue right x times -function base64__rshift(var, x) { - while(x > 0){ - var = int(var/2); - x--; - } - return var; -} -BEGIN { - BASE64__BYTES = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" -} -function b64decode(src, result, base1, base2, base3, base4) { - result = "" - while (length(src) > 0) { - # Specify byte values - base1 = substr(src, 1, 1) - base2 = substr(src, 2, 1) - base3 = 
substr(src, 3, 1); if (base3 == "") base3 = "=" - base4 = substr(src, 4, 1); if (base4 == "") base4 = "=" - # Now find numerical position in BASE64 string - byte1 = index(BASE64__BYTES, base1) - 1 - if (byte1 < 0) byte1 = 0 - byte2 = index(BASE64__BYTES, base2) - 1 - if (byte2 < 0) byte2 = 0 - byte3 = index(BASE64__BYTES, base3) - 1 - if (byte3 < 0) byte3 = 0 - byte4 = index(BASE64__BYTES, base4) - 1 - if (byte4 < 0) byte4 = 0 - # Reconstruct ASCII string - result = result sprintf( "%c", base64__lshift(base64__and(byte1, 63), 2) + base64__rshift(base64__and(byte2, 48), 4) ) - if (base3 != "=") result = result sprintf( "%c", base64__lshift(base64__and(byte2, 15), 4) + base64__rshift(base64__and(byte3, 60), 2) ) - if (base4 != "=") result = result sprintf( "%c", base64__lshift(base64__and(byte3, 3), 6) + byte4 ) - # Decrease incoming string with 4 - src = substr(src, 5) - } - return result -} diff --git a/lib/nulib/bash/base b/lib/nulib/bash/base deleted file mode 100644 index cc3d388..0000000 --- a/lib/nulib/bash/base +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -# shim pour les fonctions de nulib.sh au cas où ce module n'est pas chargée -if [ -z "$NULIBDIR" -o "$NULIBDIR" != "$NULIBINIT" ]; then - function module:() { :; } - function function:() { :; } - function require:() { :; } - function import:() { :; } -fi -##@include base.init -##@include base.core -##@include base.str -##@include base.arr -##@include base.io -##@include base.eval -##@include base.split -##@include base.path -##@include base.args -module: base base_ "Chargement de tous les modules base.*" -NULIB_RECURSIVE_IMPORT=1 -require: base.init base.core base.str base.arr base.io base.eval base.split base.path base.args diff --git a/lib/nulib/bash/base.args b/lib/nulib/bash/base.args deleted file mode 100644 index 816100a..0000000 --- a/lib/nulib/bash/base.args +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.args base_ "Fonctions de base: analyse d'arguments" -require: base.arr - -function: base_myargs_local "Afficher des commandes pour rendre locales des variables utilisées par base_myargs() - -Cela permet d'utiliser base_myargs() à l'intérieur d'une fonction. Par défaut, la génération automatique de l'autocomplete est désactivée." -function base_myargs_local() { - # par défaut, désactiver génération de autocomplete - echo "local NULIB_ARGS_HELP_DESC NULIB_ARGS_HELP_USAGE NULIB_ARGS_HELP_OPTIONS args" - echo "local NULIB_ARGS_DISABLE_AC=1" - echo "local NULIB_ARGS_ONERROR_RETURN=1" -} - -function: base_myargs: "Débuter la description des arguments reconnus par ce script. 
- -Arguments -: \$1 est un résumé de l'objet de ce script -: \$2 est le nom du script s'il est différent de \$MYNAME - -Le mode opératoire est généralement le suivant: -~~~ -myargs: -desc \"faire un traitement\" -usage \"MYNAME [options] \" -arg -o:,--output:file output= \"spécifier le fichier destination\" -arg -h:,--host:host hosts+ \"spécifier les hôtes concernés\" -arg -c,--count count=1 -parse \"\$@\"; set -- \"\${args[@]}\" -~~~" -function base_myargs:() { - NULIB_ARGS_HELP_DESC= - NULIB_ARGS_HELP_USAGE= - NULIB_ARGS_HELP_OPTIONS=() - args=() - function desc() { base_myargs_desc "$@"; } - function usage() { base_myargs_usage "$@"; } - function arg() { base_myargs_add "$@"; } - function parse() { base_myargs_parse "$@"; } -} - -function: base_myargs_desc "" -function base_myargs_desc() { - NULIB_ARGS_HELP_DESC="$*" -} - -function: base_myargs_usage "" -function base_myargs_usage() { - NULIB_ARGS_HELP_USAGE="$*" -} - -function: base_myargs_add "Ajouter une définition d'option - -Syntaxes -: base_arg MODE -: base_arg [MODE] -OPTIONS ACTION DESC -: base_arg [MODE] VARIABLE DESC - -MODE peut être l'un des caractères '+', '-', '%' et a un effet sur l'analyse -entière de la ligne de commande -* Les caractères '+' et '-' influent sur la méthode d'analyse. Par défaut, les - options sont valides n'importe où sur la ligne de commande. Avec '+', - l'analyse s'arrête au premier argument qui n'est pas une option. Avec '-', les - options sont valides n'importe ou sur la ligne de commande, mais les arguments - ne sont pas réordonnés, et apparaissent dans l'ordre de leur mention. -* Le caractère '%' demande que toutes les variables mentionnées à partir de ce - moment soient initialisées. Elle sont garanties d'être vides. - -Avec la première syntaxe, on définit précisément l'option. Deux formes sont -supportées. La forme détermine le type d'action -* Avec la forme '-OPT VAR[=VALUE]', OPT est une description d'option, VAR un nom - de variable à mettre à jour, et VALUE une valeur éventuelle pour les options - sans argument. Si plusieurs options sont mentionnées, séparées par des - virgules, alors tous les options partagent les mêmes paramètres. - - OPT peut être de la forme '-o' ou '--longopt' pour des options sans arguments. - Dans ce cas, VAR obtient le nombre de fois que l'option est mentionnée (vide - pour aucune mention, '1' pour une seule mention, etc.), sauf si on utilise la - forme VAR=VALUE, auquel cas la variable obtient la valeur VALUE, et le nombre - d'occurences de l'option n'est pas compté. - - Pour faciliter la lecture: - * '--longopt .' est équivalent à '--longopt longopt' - * '--longopt: .' est équivalent à '--longopt: longopt=' - - Avec les formes '-o:' et '--longopt:', l'option prend un argument obligatoire. - Avec les formes '-o::' et '--longopt::', l'option prend un argument facultatif - (dans ce cas, la valeur de l'option sur la ligne de commande doit - obligatoirement être collée à l'option.) - - Si ces options sont mentionnées plusieurs fois sur la ligne de commande, alors - la variable de destination est un tableau qui contient toutes les valeurs. Le - traitement de la valeur d'une variable dépend de la forme utilisée. - * Avec une option sans argument, le comportement est celui décrit ci-dessus. - * Avec une option qui prend des arguments, la forme '-o: VAR' considère que - VAR est un tableau qui contiendra toutes les valeurs mentionnées dans les - options. Avec la forme '-o: VAR=', la variable n'est pas un tableau et - contient toujours la dernière valeur spécifiée. 
-* Dans la forme 'opt \$cmd', la commande cmd est executée avec eval *dès* que - l'option est rencontrée. La variable option_ contient l'option, e.g. '-o' ou - '--longopt'. Le cas échéant, la variable value_ contient la valeur de - l'option. La fonction 'set@ NAME' met à jour la variable NAME, soit en lui - donnant la valeur \$value_, soit en l'incrémentant, suivant le type d'option. - La fonction 'inc@ NAME' incrémente la variable NAME, 'res@ NAME [VALUE]' - initialise la variable à la valeur VALUE, 'add@ NAME [VALUE]' ajoute VALUE à - la fin du tableau NAME. Par défaut, VALUE vaut \$value_ - -Avec la deuxième syntaxe, l'option est déterminée sur la base du nom de la -variable. -* Une variable de la forme 'sansarg' est pour une option simple qui ne prend pas - d'argument -* Une variable de la forme 'avecarg=[default-value]' est pour une option qui - prend un argument. -L'option générée est une option longue. En l'occurence, les options générées -sont respectivement '--sansarg' et '--avecarg:' -Les variables et les options sont toujours en minuscule. Pour les variables, le -caractère '-' est remplacé par '_'. Si une option contient une lettre en -majuscule, l'option courte correspondante à cette lettre sera aussi reconnue. - -" -function base_myargs_add() { - # description des options - base_array_add args "${@:1:2}" - # puis construire la description de l'option pour l'aide - local -a os; local o odesc - base_array_split os "$1" , - for o in "${os[@]}"; do - o="${o%%:*}" - [ -n "$odesc" ] && odesc="$odesc, " - odesc="$odesc$o" - done - for o in "${os[@]}"; do - if [[ "$o" == *:* ]]; then - if [ "${2#\$}" != "$2" ]; then - o=ARG - else - o="${2%%=*}" - o="${o^^}" - fi - [ -n "$odesc" ] && odesc="$odesc " - odesc="$odesc$o" - fi - break - done - base_array_add NULIB_ARGS_HELP_OPTIONS "$odesc" - [ -n "$3" ] && base_array_add NULIB_ARGS_HELP_OPTIONS "$3" -} - -function: base_myargs_show_help "" -function base_myargs_show_help() { - local help="$MYNAME" - [ -n "$NULIB_ARGS_HELP_DESC" ] && help="$help: $NULIB_ARGS_HELP_DESC" - [ -n "$NULIB_ARGS_HELP_USAGE" ] && help="$help - -USAGE - $NULIB_ARGS_HELP_USAGE" - [ ${#NULIB_ARGS_HELP_OPTIONS[*]} -gt 0 ] && help="$help - -OPTIONS" - echo "$help" - for help in "${NULIB_ARGS_HELP_OPTIONS[@]}"; do - echo "$help" - done -} - -function: base_myargs_parse "" -function base_myargs_parse() { - [ -z "$NULIB_NO_DISABLE_SET_X" ] && [[ $- == *x* ]] && { set +x; local NULIB_ARGS_SET_X=1; } - local r=0 - if ! parse_opts "${PRETTYOPTS[@]}" "${args[@]}" @ args -- "$@"; then - edie "$args" - r=$? 
- fi - [ -n "$NULIB_ARGS_SET_X" ] && set -x; return $r -} diff --git a/lib/nulib/bash/base.arr b/lib/nulib/bash/base.arr deleted file mode 100644 index 0524375..0000000 --- a/lib/nulib/bash/base.arr +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.arr base_ "Fonctions de base: gestion des variables tableaux" -require: base.core base.str - -function: base_array_count "retourner le nombre d'éléments du tableau \$1" -function base_array_count() { - eval "echo \${#$1[*]}" -} - -function: base_array_isempty "tester si le tableau \$1 est vide" -function base_array_isempty() { - eval "[ \${#$1[*]} -eq 0 ]" -} - -function: base_array_new "créer un tableau vide dans la variable \$1" -function base_array_new() { - eval "$1=()" -} - -function: base_array_copy "copier le contenu du tableau \$2 dans le tableau \$1" -function base_array_copy() { - eval "$1=(\"\${$2[@]}\")" -} - -function: base_array_add "ajouter les valeurs \$2..@ à la fin du tableau \$1" -function base_array_add() { - local __aa_a="$1"; shift - eval "$__aa_a=(\"\${$__aa_a[@]}\" \"\$@\")" -} - -function: base_array_ins "insérer les valeurs \$2..@ au début du tableau \$1" -function base_array_ins() { - local __aa_a="$1"; shift - eval "$__aa_a=(\"\$@\" \"\${$__aa_a[@]}\")" -} - -function: base_array_del "supprimer *les* valeurs \$2 du tableau \$1" -function base_array_del() { - local __ad_v - local -a __ad_vs - eval ' -for __ad_v in "${'"$1"'[@]}"; do - if [ "$__ad_v" != "$2" ]; then - __ad_vs=("${__ad_vs[@]}" "$__ad_v") - fi -done' - base_array_copy "$1" __ad_vs -} - -function: base_array_addu "ajouter la valeur \$2 au tableau \$1, si la valeur n'y est pas déjà - -Retourner vrai si la valeur a été ajoutée" -function base_array_addu() { - local __as_v - eval ' -for __as_v in "${'"$1"'[@]}"; do - [ "$__as_v" == "$2" ] && return 1 -done' - base_array_add "$1" "$2" - return 0 -} - -function: base_array_insu "insérer la valeur \$2 au début du tableau tableau \$1, si la valeur n'y est pas déjà - -Retourner vrai si la valeur a été ajoutée." 
-function base_array_insu() { - local __as_v - eval ' -for __as_v in "${'"$1"'[@]}"; do - [ "$__as_v" == "$2" ] && return 1 -done' - base_array_ins "$1" "$2" - return 0 -} - -function: base_array_fillrange "Initialiser le tableau \$1 avec les nombres de \$2(=1) à \$3(=10) avec un step de \$4(=1)" -function base_array_fillrange() { - local -a __af_vs - local __af_i="${2:-1}" __af_to="${3:-10}" __af_step="${4:-1}" - while [ "$__af_i" -le "$__af_to" ]; do - __af_vs=("${__af_vs[@]}" "$__af_i") - __af_i=$(($__af_i + $__af_step)) - done - base_array_copy "$1" __af_vs -} - -function: base_array_eq "tester l'égalité des tableaux \$1 et \$2" -function base_array_eq() { - local -a __ae_a1 __ae_a2 - base_array_copy __ae_a1 "$1" - base_array_copy __ae_a2 "$2" - [ ${#__ae_a1[*]} -eq ${#__ae_a2[*]} ] || return 1 - local __ae_v __ae_i=0 - for __ae_v in "${__ae_a1[@]}"; do - [ "$__ae_v" == "${__ae_a2[$__ae_i]}" ] || return 1 - __ae_i=$(($__ae_i + 1)) - done - return 0 -} - -function: base_array_contains "tester si le tableau \$1 contient la valeur \$2" -function base_array_contains() { - local __ac_v - eval ' -for __ac_v in "${'"$1"'[@]}"; do - [ "$__ac_v" == "$2" ] && return 0 -done' - return 1 -} - -function: base_array_icontains "tester si le tableau \$1 contient la valeur \$2, sans tenir compte de la casse" -function base_array_icontains() { - local __ac_v - eval ' -for __ac_v in "${'"$1"'[@]}"; do - [ "${__ac_v,,} == "${2,,}" ] && return 0 -done' - return 1 -} - -function: base_array_find "si le tableau \$1 contient la valeur \$2, afficher l'index de la valeur. Si le tableau \$3 est spécifié, afficher la valeur à l'index dans ce tableau" -function base_array_find() { - local __af_i __af_v - __af_i=0 - eval ' -for __af_v in "${'"$1"'[@]}"; do - if [ "$__af_v" == "$2" ]; then - if [ -n "$3" ]; then - recho "${'"$3"'[$__af_i]}" - else - echo "$__af_i" - fi - return 0 - fi - __af_i=$(($__af_i + 1)) -done' - return 1 -} - -function: base_array_reverse "Inverser l'ordre des élément du tableau \$1" -function base_array_reverse() { - local -a __ar_vs - local __ar_v - base_array_copy __ar_vs "$1" - base_array_new "$1" - for __ar_v in "${__ar_vs[@]}"; do - base_array_ins "$1" "$__ar_v" - done -} - -function: base_array_replace "dans le tableau \$1, remplacer toutes les occurences de \$2 par \$3..*" -function base_array_replace() { - local __ar_sn="$1"; shift - local __ar_f="$1"; shift - local -a __ar_s __ar_d - local __ar_v - base_array_copy __ar_s "$__ar_sn" - for __ar_v in "${__ar_s[@]}"; do - if [ "$__ar_v" == "$__ar_f" ]; then - __ar_d=("${__ar_d[@]}" "$@") - else - __ar_d=("${__ar_d[@]}" "$__ar_v") - fi - done - base_array_copy "$__ar_sn" __ar_d -} - -function: base_array_each "Pour chacune des valeurs ITEM du tableau \$1, appeler la fonction \$2 avec les arguments (\$3..@ ITEM)" -function base_array_each() { - local __ae_v - local -a __ae_a - base_array_copy __ae_a "$1"; shift - for __ae_v in "${__ae_a[@]}"; do - "$@" "$__ae_v" - done -} - -function: base_array_map "Pour chacune des valeurs ITEM du tableau \$1, appeler la fonction \$2 avec les arguments (\$3..@ ITEM), et remplacer la valeur par le résultat de la fonction" -function base_array_map() { - local __am_v - local -a __am_a __am_vs - local __am_an="$1"; shift - local __am_f="$1"; shift - base_array_copy __am_a "$__am_an" - for __am_v in "${__am_a[@]}"; do - __am_vs=("${__am_vs[@]}" "$("$__am_f" "$@" "$__am_v")") - done - base_array_copy "$__am_an" __am_vs -} - -function: base_array_first "afficher la première valeur du tableau \$1" -function 
base_array_first() { - eval "recho \"\${$1[@]:0:1}\"" -} - -function: base_array_last "afficher la dernière valeur du tableau \$1" -function base_array_last() { - eval "recho \"\${$1[@]: -1:1}\"" -} - -function: base_array_copy_firsts "copier toutes les valeurs du tableau \$2(=\$1) dans le tableau \$1, excepté la dernière" -function base_array_copy_firsts() { - eval "$1=(\"\${${2:-$1}[@]:0:\$((\${#${2:-$1}[@]}-1))}\")" -} - -function: base_array_copy_lasts "copier toutes les valeurs du tableau \$2(=\$1) dans le tableau \$1, excepté la première" -function base_array_copy_lasts() { - eval "$1=(\"\${${2:-$1}[@]:1}\")" -} - -function: base_array_extend "ajouter le contenu du tableau \$2 au tableau \$1" -function base_array_extend() { - eval "$1=(\"\${$1[@]}\" \"\${$2[@]}\")" -} - -function: base_array_extendu "ajouter chacune des valeurs du tableau \$2 au tableau \$1, si ces valeurs n'y sont pas déjà - -Retourner vrai si au moins une valeur a été ajoutée" -function base_array_extendu() { - local __ae_v __ae_s=1 - eval ' -for __ae_v in "${'"$2"'[@]}"; do - base_array_addu "$1" "$__ae_v" && __ae_s=0 -done' - return "$__ae_s" -} - -function: base_array_extend_firsts "ajouter toutes les valeurs du tableau \$2 dans le tableau \$1, excepté la dernière" -function base_array_extend_firsts() { - eval "$1=(\"\${$1[@]}\" \"\${$2[@]:0:\$((\${#$2[@]}-1))}\")" -} - -function: base_array_extend_lasts "ajouter toutes les valeurs du tableau \$2 dans le tableau \$1, excepté la première" -function base_array_extend_lasts() { - eval "$1=(\"\${$1[@]}\" \"\${$2[@]:1}\")" -} - -function: base_array_xsplit "créer le tableau \$1 avec chaque élément de \$2 (un ensemble d'éléments séparés par \$3, qui vaut ':' par défaut)" -function base_array_xsplit() { - eval "$1=($(recho_ "$2" | lawk -v RS="${3:-:}" ' -{ - gsub(/'\''/, "'\'\\\\\'\''") - print "'\''" $0 "'\''" -}'))" #" -} - -function: base_array_xsplitc "variante de base_array_xsplit() où le séparateur est ',' par défaut" -function base_array_xsplitc() { - base_array_xsplit "$1" "$2" "${3:-,}" -} - -function: base_array_split "créer le tableau \$1 avec chaque élément de \$2 (un ensemble d'éléments séparés par \$3, qui vaut ':' par défaut) - -Les éléments vides sont ignorés. par exemple \"a::b\" est équivalent à \"a:b\"" -function base_array_split() { - eval "$1=($(recho_ "$2" | lawk -v RS="${3:-:}" ' -/^$/ { next } -{ - gsub(/'\''/, "'\'\\\\\'\''") - print "'\''" $0 "'\''" -}'))" #" -} - -function: base_array_splitc "variante de base_array_split() où le séparateur est ',' par défaut" -function base_array_splitc() { - base_array_split "$1" "$2" "${3:-,}" -} - -function: base_array_xsplitl "créer le tableau \$1 avec chaque ligne de \$2" -function base_array_xsplitl() { - eval "$1=($(recho_ "$2" | strnl2lf | lawk ' -{ - gsub(/'\''/, "'\'\\\\\'\''") - print "'\''" $0 "'\''" -}'))" #" -} - -function: base_array_splitl "créer le tableau \$1 avec chaque ligne de \$2 - -Les lignes vides sont ignorés." 
-function base_array_splitl() { - eval "$1=($(recho_ "$2" | strnl2lf | lawk ' -/^$/ { next } -{ - gsub(/'\''/, "'\'\\\\\'\''") - print "'\''" $0 "'\''" -}'))" #" -} - -function: base_array_join "afficher le contenu du tableau \$1 sous forme d'une liste de valeurs séparées par \$2 (qui vaut ':' par défaut) - -* Si \$1==\"@\", alors les éléments du tableaux sont les arguments de la fonction à partir de \$3 -* Si \$1!=\"@\" et que le tableau est vide, afficher \$3 -* Si \$1!=\"@\", \$4 et \$5 sont des préfixes et suffixes à rajouter à chaque élément" -function base_array_join() { - local __aj_an __aj_l __aj_j __aj_s="${2:-:}" __aj_pf __aj_sf - if [ "$1" == "@" ]; then - __aj_an="\$@" - shift; shift - else - __aj_an="\${$1[@]}" - __aj_pf="$4" - __aj_sf="$5" - fi - eval ' -for __aj_l in "'"$__aj_an"'"; do - __aj_j="${__aj_j:+$__aj_j'"$__aj_s"'}$__aj_pf$__aj_l$__aj_sf" -done' - if [ -n "$__aj_j" ]; then - recho "$__aj_j" - elif [ "$__aj_an" != "\$@" -a -n "$3" ]; then - recho "$3" - fi -} - -function: base_array_joinc "afficher les éléments du tableau \$1 séparés par ','" -function base_array_joinc() { - base_array_join "$1" , "$2" "$3" "$4" -} - -function: base_array_joinl "afficher les éléments du tableau \$1 à raison d'un élément par ligne" -function base_array_joinl() { - base_array_join "$1" " -" "$2" "$3" "$4" -} - -function: base_array_mapjoin "map le tableau \$1 avec la fonction \$2, puis afficher le résultat en séparant chaque élément par \$3 - -Les arguments et la sémantique sont les mêmes que pour base_array_join() en -tenant compte de l'argument supplémentaire \$2 qui est la fonction pour -base_array_map() (les autres arguments sont décalés en conséquence)" -function base_array_mapjoin() { - local __amj_src="$1" __amj_func="$2" __amj_sep="$3" - shift; shift; shift - if [ "$__amj_src" == "@" ]; then - local -a __amj_tmpsrc - __amj_tmpsrc=("$@") - __amj_src=__amj_tmpsrc - set -- - fi - local -a __amj_tmp - base_array_copy __amj_tmp "$__amj_src" - base_array_map __amj_tmp "$__amj_func" - base_array_join __amj_tmp "$__amj_sep" "$@" -} - -function: base_array_fix_paths "Corriger les valeurs du tableau \$1. Les valeurs contenant le séparateur \$2(=':') sont séparées en plusieurs valeurs. - -Par exemple avec le tableau input=(a b:c), le résultat est input=(a b c)" -function base_array_fix_paths() { - local __afp_an="$1" __afp_s="${2:-:}" - local -a __afp_vs - local __afp_v - base_array_copy __afp_vs "$__afp_an" - base_array_new "$__afp_an" - for __afp_v in "${__afp_vs[@]}"; do - base_array_split __afp_v "$__afp_v" "$__afp_s" - base_array_extend "$__afp_an" __afp_v - done -} diff --git a/lib/nulib/bash/base.core b/lib/nulib/bash/base.core deleted file mode 100644 index ef130f6..0000000 --- a/lib/nulib/bash/base.core +++ /dev/null @@ -1,458 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.core base_ "Fonctions de base: fondement" - -function: echo_ "afficher la valeur \$* sans passer à la ligne" -function echo_() { echo -n "$*"; } - -function: recho "afficher une valeur brute. - -contrairement à la commande echo, ne reconnaitre aucune option (i.e. -e, -E, -n -ne sont pas signifiants)" -function recho() { - if [[ "${1:0:2}" == -[eEn] ]]; then - local first="${1:1}"; shift - echo -n - - echo "$first" "$@" - else - echo "$@" - fi -} - -function: recho_ "afficher une valeur brute, sans passer à la ligne. - -contrairement à la commande echo, ne reconnaitre aucune option (i.e. 
-e, -E, -n -ne sont pas signifiants)" -function recho_() { - if [[ "${1:0:2}" == -[eEn] ]]; then - local first="${1:1}"; shift - echo -n - - echo -n "$first" "$@" - else - echo -n "$@" - fi -} - -function: _qval "Dans la chaine \$*, remplacer: -~~~ -\\ par \\\\ -\" par \\\" -\$ par \\\$ -\` par \\\` -~~~ - -Cela permet de quoter une chaine à mettre entre guillements. - -note: la protection de ! n'est pas effectuée, parce que le comportement du shell -est incohérent entre le shell interactif et les scripts. Pour une version plus -robuste, il est nécessaire d'utiliser un programme externe tel que sed ou awk" -function _qval() { - local s="$*" - s="${s//\\/\\\\}" - s="${s//\"/\\\"}" - s="${s//\$/\\\$}" - s="${s//\`/\\\`}" - recho_ "$s" -} - -function: base_should_quote "Tester si la chaine \$* doit être mise entre quotes" -function base_should_quote() { - # pour optimiser, toujours mettre entre quotes si plusieurs arguments sont - # spécifiés ou si on spécifie une chaine vide ou de plus de 80 caractères - [ $# -eq 0 -o $# -gt 1 -o ${#1} -eq 0 -o ${#1} -gt 80 ] && return 0 - # sinon, tester si la chaine contient des caractères spéciaux - local s="$*" - s="${s//[a-zA-Z0-9]/}" - s="${s//,/}" - s="${s//./}" - s="${s//+/}" - s="${s//\//}" - s="${s//-/}" - s="${s//_/}" - s="${s//=/}" - [ -n "$s" ] -} - -function: qval "Afficher la chaine \$* quotée avec \"" -function qval() { - echo -n \" - _qval "$@" - echo \" -} - -function: qvalm "Afficher la chaine \$* quotée si nécessaire avec \"" -function qvalm() { - if base_should_quote "$@"; then - echo -n \" - _qval "$@" - echo \" - else - recho "$@" - fi -} - -function: qvalr "Afficher la chaine \$* quotée si nécessaire avec \", sauf si elle est vide" -function qvalr() { - if [ -z "$*" ]; then - : - elif base_should_quote "$@"; then - echo -n \" - _qval "$@" - echo \" - else - recho "$@" - fi -} - -function: qvals "Afficher chaque argument de cette fonction quotée le cas échéant avec \", chaque valeur étant séparée par un espace" -function qvals() { - local arg first=1 - for arg in "$@"; do - [ -z "$first" ] && echo -n " " - if base_should_quote "$arg"; then - echo -n \" - _qval "$arg" - echo -n \" - else - recho_ "$arg" - fi - first= - done - [ -z "$first" ] && echo -} - -function: qwc "Dans la chaine \$*, remplacer: -~~~ - \\ par \\\\ -\" par \\\" -\$ par \\\$ -\` par \\\` -~~~ -puis quoter la chaine avec \", sauf les wildcards *, ? et [class] - -Cela permet de quoter une chaine permettant de glober des fichiers, e.g -~~~ -eval \"ls \$(qwc \"\$value\")\" -~~~ - -note: la protection de ! n'est pas effectuée, parce que le comportement du shell -est incohérent entre le shell interactif et les scripts. 
Pour une version plus -robuste, il est nécessaire d'utiliser un programme externe tel que sed ou awk" -function qwc() { - local s="$*" - s="${s//\\/\\\\}" - s="${s//\"/\\\"}" - s="${s//\$/\\\$}" - s="${s//\`/\\\`}" - local r a b c - while [ -n "$s" ]; do - a=; b=; c= - a=; [[ "$s" == *\** ]] && { a="${s%%\**}"; a=${#a}; } - b=; [[ "$s" == *\?* ]] && { b="${s%%\?*}"; b=${#b}; } - c=; [[ "$s" == *\[* ]] && { c="${s%%\[*}"; c=${#c}; } - if [ -z "$a" -a -z "$b" -a -z "$c" ]; then - r="$r\"$s\"" - break - fi - if [ -n "$a" ]; then - [ -n "$b" ] && [ $a -lt $b ] && b= - [ -n "$c" ] && [ $a -lt $c ] && c= - fi - if [ -n "$b" ]; then - [ -n "$a" ] && [ $b -lt $a ] && a= - [ -n "$c" ] && [ $b -lt $c ] && c= - fi - if [ -n "$c" ]; then - [ -n "$a" ] && [ $c -lt $a ] && a= - [ -n "$b" ] && [ $c -lt $b ] && b= - fi - if [ -n "$a" ]; then # PREFIX* - a="${s%%\**}" - s="${s#*\*}" - [ -n "$a" ] && r="$r\"$a\"" - r="$r*" - elif [ -n "$b" ]; then # PREFIX? - a="${s%%\?*}" - s="${s#*\?}" - [ -n "$a" ] && r="$r\"$a\"" - r="$r?" - elif [ -n "$c" ]; then # PREFIX[class] - a="${s%%\[*}" - b="${s#*\[}"; b="${b%%\]*}" - s="${s:$((${#a} + ${#b} + 2))}" - [ -n "$a" ] && r="$r\"$a\"" - r="$r[$b]" - fi - done - recho_ "$r" -} - -function: qlines "Traiter chaque ligne de l'entrée standard pour en faire des chaines quotées avec '" -function qlines() { - sed "s/'/'\\\\''/g; s/.*/'&'/g" -} - -function: setv "initialiser la variable \$1 avec la valeur \$2..* - -note: en principe, la syntaxe est 'setv var values...'. cependant, la syntaxe 'setv var=values...' est supportée aussi" -function setv() { - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - fi - eval "$s__var=\"\$*\"" -} - -function: _setv "Comme la fonction setv() mais ne supporte que la syntaxe '_setv var values...' - -Cette fonction est légèrement plus rapide que setv()" -function _setv() { - local s__var="$1"; shift - eval "$s__var=\"\$*\"" -} - -function: echo_setv "Afficher la commande qui serait lancée par setv \"\$@\"" -function echo_setv() { - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - fi - echo "$s__var=$(qvalr "$*")" -} - -function: echo_setv2 "Afficher la commande qui recrée la variable \$1. - -Equivalent à -~~~ -echo_setv \"\$1=\${!1}\" -~~~ - -Si d'autres arguments que le nom de la variable sont spécifiés, cette fonction -se comporte comme echo_setv()" -function echo_setv2() { - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - fi - if [ $# -eq 0 ]; then - echo_setv "$s__var" "${!s__var}" - else - echo_setv "$s__var" "$@" - fi -} - -function: seta "initialiser le tableau \$1 avec les valeurs \$2..@ - -note: en principe, la syntaxe est 'seta array values...'. cependant, la syntaxe -'seta array=values...' est supportée aussi" -function seta() { - local s__array="$1"; shift - if [[ "$s__array" == *=* ]]; then - set -- "${s__array#*=}" "$@" - s__array="${s__array%%=*}" - fi - eval "$s__array=(\"\$@\")" -} - -function: _seta "Comme la fonction seta() mais ne supporte que la syntaxe '_seta array values...' 
- -Cette fonction est légèrement plus rapide que seta()" -function _seta() { - local s__array="$1"; shift - eval "$s__array=(\"\$@\")" -} - -function: echo_seta "Afficher la commande qui serait lancée par seta \"\$@\"" -function echo_seta() { - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - fi - echo "$s__var=($(qvals "$@"))" -} - -function: echo_seta2 "Afficher la commande qui recrée le tableau \$1 - -Si d'autres arguments que le nom de tableau sont spécifiés, cette fonction se -comporte comme echo_seta()" -function echo_seta2() { - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - elif [ $# -eq 0 ]; then - eval "set -- \"\${$s__var[@]}\"" - fi - echo "$s__var=($(qvals "$@"))" -} - -function: setx "Initialiser une variable avec le résultat d'une commande - -* syntaxe 1: initialiser la variable \$1 avec le résultat de la commande \"\$2..@\" - ~~~ - setx var cmd - ~~~ - note: en principe, la syntaxe est 'setx var cmd args...'. cependant, la syntaxe - 'setx var=cmd args...' est supportée aussi - -* syntaxe 2: initialiser le tableau \$1 avec le résultat de la commande - \"\$2..@\", chaque ligne du résultat étant un élément du tableau - ~~~ - setx -a array cmd - ~~~ - note: en principe, la syntaxe est 'setx -a array cmd args...'. cependant, la - syntaxe 'setx -a array=cmd args...' est supportée aussi" -function setx() { - if [ "$1" == -a ]; then - shift - local s__array="$1"; shift - if [[ "$s__array" == *=* ]]; then - set -- "${s__array#*=}" "$@" - s__array="${s__array%%=*}" - fi - eval "$s__array=($("$@" | qlines))" - else - local s__var="$1"; shift - if [[ "$s__var" == *=* ]]; then - set -- "${s__var#*=}" "$@" - s__var="${s__var%%=*}" - fi - eval "$s__var="'"$("$@")"' - fi -} - -function: _setvx "Comme la fonction setx() mais ne supporte que l'initialisation d'une variable scalaire avec la syntaxe '_setvx var cmd args...' pour gagner (un peu) en rapidité d'exécution." -function _setvx() { - local s__var="$1"; shift - eval "$s__var="'"$("$@")"' -} - -function: _setax "Comme la fonction setx() mais ne supporte que l'initialisation d'un tableau avec la syntaxe '_setax array cmd args...' pour gagner (un peu) en rapidité d'exécution." -function _setax() { - local s__array="$1"; shift - eval "$s__array=($("$@" | qlines))" -} - -function: base_is_defined "tester si la variable \$1 est définie" -function base_is_defined() { - [ -n "$(declare -p "$1" 2>/dev/null)" ] -} - -function: base_is_array "tester si la variable \$1 est un tableau" -function base_is_array() { - [[ "$(declare -p "$1" 2>/dev/null)" =~ declare\ -[^\ ]*a[^\ ]*\ ]] -} - -function: base_array_local "afficher les commandes pour faire une copie dans la variable locale \$1 du tableau \$2" -function base_array_local() { - if [ "$1" == "$2" ]; then - declare -p "$1" 2>/dev/null || echo "local -a $1" - else - echo "local -a $1; $1=(\"\${$2[@]}\")" - fi -} - -function: base_upvar "Implémentation de upvar() de http://www.fvue.nl/wiki/Bash:_Passing_variables_by_reference - -USAGE -~~~ -local varname && base_upvar varname values... -~~~ -* @param varname Variable name to assign value to -* @param values Value(s) to assign. If multiple values (> 1), an array is - assigned, otherwise a single value is assigned." 
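# Editorial sketch (not part of the deleted file): returning a value to the
# caller through base_upvar, following the USAGE block above. compute_sum and
# the variable names are illustrative only; the example is commented out.
#   compute_sum() {               # $1 = name of the caller's result variable
#       local total=$(( $2 + $3 ))
#       base_upvar "$1" "$total"  # assign into the caller's variable
#   }
#   local sum && compute_sum sum 2 3   # sum == 5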
-function base_upvar() { - if unset -v "$1"; then - if [ $# -lt 2 ]; then - eval "$1=\"\$2\"" - else - eval "$1=(\"\${@:2}\")" - fi - fi -} - -function: base_array_upvar "Comme base_upvar() mais force la création d'un tableau, même s'il y a que 0 ou 1 argument" -function base_array_upvar() { - unset -v "$1" && eval "$1=(\"\${@:2}\")" -} - -function: base_upvars "Implémentation modifiée de upvars() de http://www.fvue.nl/wiki/Bash:_Passing_variables_by_reference - -Par rapport à l'original, il n'est plus nécessaire de préfixer une variable -scalaire avec -v, et -a peut être spécifié sans argument. - -USAGE -~~~ -local varnames... && base_upvars [varname value | -aN varname values...]... -~~~ -* @param -a assigns remaining values to varname as array -* @param -aN assigns next N values to varname as array. Returns 1 if wrong - number of options occurs" -function base_upvars() { - while [ $# -gt 0 ]; do - case "$1" in - -a) - unset -v "$2" && eval "$2=(\"\${@:3}\")" - break - ;; - -a*) - unset -v "$2" && eval "$2=(\"\${@:3:${1#-a}}\")" - shift $((${1#-a} + 2)) || return 1 - ;; - *) - unset -v "$1" && eval "$1=\"\$2\"" - shift; shift - ;; - esac - done -} - -function: base_set_debug "Passer en mode DEBUG" -function base_set_debug() { - export NULIB_DEBUG=1 -} - -function: base_is_debug "Tester si on est en mode DEBUG" -function base_is_debug() { - [ -n "$NULIB_DEBUG" ] -} - -function: lawk "Lancer GNUawk avec la librairie 'base'" -function lawk() { - gawk -i base "$@" -} - -function: cawk "Lancer GNUawk avec LANG=C et la librairie 'base' - -Le fait de forcer la valeur de LANG permet d'éviter les problèmes avec la locale" -function cawk() { - LANG=C gawk -i base "$@" -} - -function: lsort "Lancer sort avec support de la locale courante" -function: csort "Lancer sort avec LANG=C pour désactiver le support de la locale - -Avec LANG!=C, sort utilise les règles de la locale pour le tri, et par -exemple, avec LANG=fr_FR.UTF-8, la locale indique que les ponctuations doivent -être ignorées." -function lsort() { sort "$@"; } -function csort() { LANG=C sort "$@"; } - -function: lgrep "Lancer grep avec support de la locale courante" -function: cgrep "Lancer grep avec LANG=C pour désactiver le support de la locale" -function lgrep() { grep "$@"; } -function cgrep() { LANG=C grep "$@"; } - -function: lsed "Lancer sed avec support de la locale courante" -function: csed "Lancer sed avec LANG=C pour désactiver le support de la locale" -function lsed() { sed "$@"; } -function csed() { LANG=C sed "$@"; } - -function: ldiff "Lancer diff avec support de la locale courante" -function: cdiff "Lancer diff avec LANG=C pour désactiver le support de la locale" -function ldiff() { diff "$@"; } -function cdiff() { LANG=C diff "$@"; } diff --git a/lib/nulib/bash/base.eval b/lib/nulib/bash/base.eval deleted file mode 100644 index d443a2d..0000000 --- a/lib/nulib/bash/base.eval +++ /dev/null @@ -1,468 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.eval base_ "Fonctions de base: évaluation d'expressions" -require: base.str base.arr - -################################################################################ -# Chaines - -function: base_evals "Appliquer à une chaine de caractères une suite de traitements, e.g: -~~~ -base_evals var deref +suffix -~~~ -est équivalent à -~~~ -echo \"\${var}suffix\" -~~~ - -En commençant avec la valeur initiale \$1, les arguments \$2..* sont des -opérations à appliquer dans l'ordre. 
- -Les opérations suivantes considèrent que la valeur courante est un nom de -variable: -~~~ -:- := :? :+ deref dcount -~~~ - -Toutes les autres opérations travaillent directement avec la valeur -courante. Les opérations suivantes appliquent une transformation: -~~~ -# % / : ^ , +# -# +% -% + - mid repl -~~~ -IMPORTANT: aucune de ces fonctions ne met en échappement les valeur des -patterns. Ainsi, si un pattern contient des caractères interdits comme \\ ou \$, -il faut d'abord le traiter avec _qval() - -Les opérations suivantes font un test sur la valeur et retournent immédiatement: -~~~ -= == != < > -eq -ne -lt -le -gt -ge -n -z -~~~ - -La syntaxe des opérateurs standards de bash est reprise autant que possible, i.e -si on a l'habitude d'écrire ${varOP} en bash, alors la syntaxe à utiliser à -priori est 'base_evals var OP' ou 'base_evals var deref OP' suivant les -opérateurs. - -Autres opérateurs: -~~~ -deref indirection -dcount nombre d'éléments du tableau -+#STR ajouter un préfixe --#STR supprimer un préfixe -+%STR ou +STR ajouter un suffixe --%STR ou -STR supprimer un suffixe -mid RANGE traiter la chaine avec base_strmid() -repl FROM TO traiter la chaine avec base_strrepl() -~~~ - -Tout autre opérateur est traité comme un appel à une fonction qui prend un seul -argument, la valeur courante, et qui affiche le résultat." -function base_evals() { - local -a es__tmp - local es__value="$1"; shift - while [ $# -gt 0 ]; do - case "$1" in - # l'argument est le nom de la variable - :-*|:=*|:\?*|:+*) eval 'es__value="${'"${es__value}$1"'}"';; - d|deref) es__value="${!es__value}";; - dc|dcount|ds|dsize) - es__value="${es__value}[@]" - es__tmp=("${!es__value}") - es__value="${#es__tmp[@]}" - ;; - # l'argument est la valeur de la variable - \#*|%*|/*|:*|^*|,*) eval 'es__value="${es__value'"$1"'}"';; - l|length) es__value="${#es__value}";; - =|==|!=|\<|\>|-eq|-ne|-lt|-le|-gt|-ge) - es__tmp=(\[ "$es__value" "$@" ]); "${es__tmp[@]}"; return $?;; - -n|-z) es__tmp=(\[ "$1" "$es__value" ]); "${es__tmp[@]}"; return $?;; - +#*) eval 'es__value="'"${1#+#}"'$es__value"';; - -#*) eval 'es__value="${es__value'"${1#-}"'}"';; - +%*) eval 'es__value="$es__value"'"${1#+%}";; - +*) eval 'es__value="$es__value"'"${1#+}";; - -%*) eval 'es__value="${es__value'"${1#-}"'}"';; - -*) eval 'es__value="${es__value%'"${1#-}"'}"';; - mid|strmid|base_strmid) eval 'es__value="$(base_strmid "$2" "$es__value")"'; shift;; - repl|strrepl|base_strrepl) eval 'es__value="$(base_strrepl "$2" "$3" "$es__value")"'; shift; shift;; - *) es__value="$("$1" "$es__value")";; - esac - shift - done - echo "$es__value" -} - -function: base_setxs "équivalent à setx \$1 evals \$2..@" -function base_setxs() { - local -a ss__args - if [ "$1" == -a ]; then ss__args=(-a); shift; fi - local ss__var="$1"; shift - if [[ "$ss__var" == *=* ]]; then - set -- "${ss__var#*=}" "$@" - ss__var="${ss__var%%=*}" - fi - ss__args=("${ss__args[@]}" "$ss__var") - setx "${ss__args[@]}" base_evals "$@" -} - -function: base_cmds "lancer une commande avec comme argument le résultat de evals - -Par exemple, les deux commandes suivantes sont équivalentes: -~~~ -base_cmds CMD ARGS... // EVALARGS -CMD ARGS... 
\"\$(evals EVALARGS)\" -~~~" -function base_cmds() { - local cs__arg - local -a cs__cmd - while [ $# -gt 0 ]; do - cs__arg="$1"; shift - [ "$cs__arg" == // ] && break - cs__cmd=("${cs__cmd[@]}" "$cs__arg") - done - "${cs__cmd[@]}" "$(base_evals "$@")" -} - -function: base_evalm "construire une chaine en mixant chaines statiques et évaluations de commandes - -Par exemple, les deux commandes suivantes sont équivalentes: -~~~ -evalm //\"string\" cmd args // cmd args //\"string\" -echo \"string\$(cmd args)\$(cmd args)string\" -~~~" -function base_evalm() { - local em__val em__arg - local -a em__cmd - while [ $# -gt 0 ]; do - em__arg="$1" - if [ "${em__arg#//}" != "$em__arg" ]; then - em__val="$em__val${em__arg#//}" - shift - continue - fi - em__cmd=() - while [ $# -gt 0 ]; do - em__arg="$1" - [ "${em__arg#//}" != "$em__arg" ] && break - shift - if [ "${em__arg%//}" != "$em__arg" ]; then - local em__tmp="${em__arg%//}" - if [ -z "${em__tmp//\\/}" ]; then - em__arg="${em__arg#\\}" - em__cmd=("${em__cmd[@]}" "$em__arg") - continue - fi - fi - em__cmd=("${em__cmd[@]}" "$em__arg") - done - [ ${#em__cmd[*]} -gt 0 ] && em__val="$em__val$("${em__cmd[@]}")" - done - echo "$em__val" -} - -function: base_setxm "équivalent à setx \$1 evalm \$2..@" -function base_setxm() { - local -a sm__args - if [ "$1" == -a ]; then sm__args=(-a); shift; fi - local sm__var="$1"; shift - if [[ "$sm__var" == *=* ]]; then - set -- "${sm__var#*=}" "$@" - sm__var="${sm__var%%=*}" - fi - sm__args=("${sm__args[@]}" "$sm__var") - setx "${sm__args[@]}" base_evalm "$@" -} - -function: base_cmdm "lancer une commande avec comme argument le résultat de evalm - -Par exemple, les deux commandes suivantes sont équivalentes: -~~~ -base_cmdm CMD ARGS... // EVALARGS -CMD ARGS... \"\$(evalm EVALARGS)\" -~~~" -function base_cmdm() { - local cm__arg - local -a cm__cmd - while [ $# -gt 0 ]; do - cm__arg="$1"; shift - [ "$cm__arg" == // ] && break - cm__cmd=("${cm__cmd[@]}" "$cm__arg") - done - "${cm__cmd[@]}" "$(base_evalm "$@")" -} - -################################################################################ -# Nombres - -function: base_evali "Evaluer une expression numérique" -function base_evali() { - echo "$(($*))" -} - -################################################################################ -# Tableaux - -################################################################################ -# Composition - -function: base_evalc "Implémenter une syntaxe lisible et naturelle permettant d'enchainer des traitements sur une valeur. - -Par exemple, la commande -~~~ -evalc cmd1... // cmd2... // cmd3... -~~~ -est équivalente à la commande -~~~ -cmd3... \"\$(cmd2... 
\"\$(cmd1...)\")\" -~~~" -function base_evalc() { - local ec__arg ec__cmd ec__finalcmd - - while [ $# -gt 0 ]; do - ec__arg="$1"; shift - if [ "$ec__arg" == // ]; then - if [ ${#ec__cmd} -gt 0 ]; then - if [ ${#ec__finalcmd} -eq 0 ]; then ec__finalcmd="$ec__cmd" - else ec__finalcmd="$ec__cmd \$($ec__finalcmd)" - fi - fi - ec__cmd= - continue - elif [ "${ec__arg%//}" != "$ec__arg" ]; then - local tmp="${ec__arg%//}" - [ -z "${tmp//\\/}" ] && ec__arg="${ec__arg#\\}" - fi - ec__cmd="$ec__cmd \"$(_qval "$ec__arg")\"" - done - if [ ${#ec__cmd} -gt 0 ]; then - if [ ${#ec__finalcmd} -eq 0 ]; then ec__finalcmd="$ec__cmd" - else ec__finalcmd="$ec__cmd \$($ec__finalcmd)" - fi - fi - eval "$ec__finalcmd" -} - -function: base_setxc "équivalent à setx \$1 evalc \$2..@" -function base_setxc() { - local -a sx__args - if [ "$1" == -a ]; then sx__args=(-a); shift; fi - local sx__var="$1"; shift - if [[ "$sx__var" == *=* ]]; then - set -- "${sx__var#*=}" "$@" - sx__var="${sx__var%%=*}" - fi - sx__args=("${sx__args[@]}" "$sx__var") - setx "${sx__args[@]}" base_evalc "$@" -} - -################################################################################ -# Chainage - -function: base_evalp "Implémenter une syntaxe alternative permettant d'enchainer des traitements sur un flux de données. - -Par exemple, la commande -~~~ -evalp cmd1... // cmd2... // cmd3... -~~~ -affiche le résultat de la commande -~~~ -cmd1... | cmd2... | cmd3... -~~~ - -Typiquement, cette fonction permet de faciliter la *construction* d'un -enchainement de commandes par programme, ou de faciliter l'utilisation de la -fonction setx() pour récupérer le résultat d'un enchainement. Dans les autres -cas, il est plus simple et naturel d'écrire les enchainements avec la syntaxe de -bash." -function base_evalp() { - local ep__arg ep__cmd - - while [ $# -gt 0 ]; do - ep__arg="$1"; shift - if [ "$ep__arg" == // ]; then - ep__cmd="$ep__cmd |" - continue - elif [ "${ep__arg%//}" != "$ep__arg" ]; then - local ep__tmp="${ep__arg%//}" - if [ -z "${ep__tmp//\\/}" ]; then - ep__arg="${ep__arg#\\}" - fi - fi - ep__cmd="${ep__cmd:+$ep__cmd }\"$(_qval "$ep__arg")\"" - done - eval "$ep__cmd" -} - -function: base_setxp "équivalent à setx \$1 evalp \$2..@" -function base_setxp() { - local -a sp__args - if [ "$1" == -a ]; then sp__args=(-a); shift; fi - local sp__var="$1"; shift - if [[ "$sp__var" == *=* ]]; then - set -- "${sp__var#*=}" "$@" - sp__var="${sp__var%%=*}" - fi - sp__args=("${sp__args[@]}" "$sp__var") - setx "${sp__args[@]}" base_evalp "$@" -} - -function: base_cmdp "lancer une commande avec comme argument le résultat de evalp - -Par exemple, les deux commandes suivantes sont équivalentes: -~~~ -base_cmdp CMD ARGS... // EVALARGS -CMD ARGS... 
\"\$(evalp EVALARGS)\" -~~~" -function base_cmdp() { - local cp__arg - local -a cp__cmd - while [ $# -gt 0 ]; do - cp__arg="$1"; shift - [ "$cp__arg" == // ] && break - cp__cmd=("${cp__cmd[@]}" "$cp__arg") - done - "${cp__cmd[@]}" "$(base_evalp "$@")" -} - -################################################################################ -# Générique - -function: base_evalx "" -function base_evalx() { - : -} - -function: base_setxx "équivalent à setx \$1 evalx \$2..@" -function base_setxx() { - local -a sx__args - if [ "$1" == -a ]; then sx__args=(-a); shift; fi - local sx__var="$1"; shift - if [[ "$sx__var" == *=* ]]; then - set -- "${sx__var#*=}" "$@" - sx__var="${sx__var%%=*}" - fi - sx__args=("${sx__args[@]}" "$sx__var") - setx "${sx__args[@]}" base_evalx "$@" -} - -function: base_cmdx "lancer une commande avec comme argument le résultat de evalx - -Par exemple, les deux commandes suivantes sont équivalentes: -~~~ -base_cmdx CMD ARGS... // EVALARGS -CMD ARGS... \"\$(evalx EVALARGS)\" -~~~" -function base_cmdx() { - local cx__arg - local -a cx__cmd - while [ $# -gt 0 ]; do - cx__arg="$1"; shift - [ "$cx__arg" == // ] && break - cx__cmd=("${cx__cmd[@]}" "$cx__arg") - done - "${cx__cmd[@]}" "$(base_evalx "$@")" -} - -function: base_cmdsplitf "\ -Cette fonction doit être appelée avec N arguments (avec N>1). Elle analyse et -découpe l'argument \$N comme avec une ligne de commande du shell. Ensuite, elle -appelle la fonction \$1 avec les arguments de \$2 à \${N-1}, suivi des arguments -obtenus lors de l'analyse de l'argument \$N. Par exemple, la commande suivante: -~~~ -strsplitf cmd arg1 \"long arg2\" \"arg3 'long arg4'\" -~~~ -est équivalente à: -~~~ -cmd arg1 \"long arg2\" arg3 \"long arg4\" -~~~ - -Retourner le code 127 si la fonction à appeler n'est pas spécifiée. Retourner le -code 126 si une erreur s'est produite lors de l'analyse de l'argument \$N" -function base_cmdsplitf() { - [ $# -gt 0 ] || return 127 - local func count - func="$1"; shift - count=$# - if [ $count -gt 0 ]; then - eval 'set -- "${@:1:$(($count-1))}" '"${!count}" || return 126 - fi - "$func" "$@" -} - -################################################################################ -# Tests - -function: testx "Faire un test unaire avec la commande [ sur une valeur calculée avec evalx. - -Utiliser la syntaxe 'testx op cmds...' e.g. -~~~ -testx -z cmd1 // cmd2 -~~~" -function testx() { - local t__op="$1"; shift - local t__val="$(evalx "$@")" - [ $t__op "$t__val" ] -} - -function: test2x "Faire une test binaire avec la commande [ entre une valeur spécifiée et une valeur calculée avec evalx. - -Utiliser la syntaxe 'test2x value op cmds...' e.g. -~~~ -test2x value == cmd1 // cmd2 -~~~" -function test2x() { - local t__val1="$1"; shift - local t__op="$1"; shift - local t__val2="$(evalx "$@")" - [ "$t__val1" $t__op "$t__val2" ] -} - -function: testrx "Faire une test binaire avec la commande [[ entre une valeur spécifiée et une valeur calculée avec evalx. - -Utiliser la syntaxe 'testrx value op cmds...' e.g. -~~~ -testrx value == cmd1 // cmd2 -~~~" -function testrx() { - local t__val1="$1"; shift - local t__op="$1"; shift - local t__val2="$(evalx "$@")" - eval '[[ "$t__val1" '"$t__op"' "$t__val2" ]]' -} - -function: testp "Faire un test unaire avec la commande [ sur une valeur calculée avec evalp. - -Utiliser la syntaxe 'testp op cmds...' e.g. 
-~~~ -testp -z cmd1 // cmd2 -~~~" -function testp() { - local t__op="$1"; shift - local t__val="$(evalp "$@")" - [ $t__op "$t__val" ] -} - -function: test2p "Faire une test binaire avec la commande [ entre une valeur spécifiée et une valeur calculée avec evalp. - -Utiliser la syntaxe 'test2p value op cmds...' e.g. -~~~ -test2p value == cmd1 // cmd2 -~~~" -function test2p() { - local t__val1="$1"; shift - local t__op="$1"; shift - local t__val2="$(evalp "$@")" - [ "$t__val1" $t__op "$t__val2" ] -} - -function: testrp "Faire une test binaire avec la commande [[ entre une valeur spécifiée et une valeur calculée avec evalp. - -Utiliser la syntaxe 'testrp value op cmds...' e.g. -~~~ -testrp value == cmd1 // cmd2 -~~~" -function testrp() { - local t__val1="$1"; shift - local t__op="$1"; shift - local t__val2="$(evalp "$@")" - eval '[[ "$t__val1" '"$t__op"' "$t__val2" ]]' -} diff --git a/lib/nulib/bash/base.init b/lib/nulib/bash/base.init deleted file mode 100644 index 0661a5c..0000000 --- a/lib/nulib/bash/base.init +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.init base_ "Fonctions de base: initialiser l'environnement" - -if [ -z "$NULIB_NO_INIT_ENV" ]; then - # Emplacement du script courant - if [ "$0" == "-bash" ]; then - MYNAME= - MYDIR= - MYSELF= - elif [ ! -f "$0" -a -f "${0#-}" ]; then - MYNAME="$(basename -- "${0#-}")" - MYDIR="$(dirname -- "${0#-}")" - MYDIR="$(cd "$MYDIR"; pwd)" - MYSELF="$MYDIR/$MYNAME" - else - MYNAME="$(basename -- "$0")" - MYDIR="$(dirname -- "$0")" - MYDIR="$(cd "$MYDIR"; pwd)" - MYSELF="$MYDIR/$MYNAME" - fi - [ -n "$NULIBDIR" ] || NULIBDIR="$MYDIR" - - # Repertoire temporaire - [ -z "$TMPDIR" -a -d "$HOME/tmp" ] && TMPDIR="$HOME/tmp" - [ -z "$TMPDIR" ] && TMPDIR="${TMP:-${TEMP:-/tmp}}" - export TMPDIR - - # User - [ -z "$USER" -a -n "$LOGNAME" ] && export USER="$LOGNAME" - - # Le fichier nulibrc doit être chargé systématiquement - [ -f /etc/debian_chroot ] && NULIB_CHROOT=1 - [ -f /etc/nulibrc ] && . /etc/nulibrc - [ -f ~/.nulibrc ] && . ~/.nulibrc - - # Type de système sur lequel tourne le script - UNAME_SYSTEM=`uname -s` - [ "${UNAME_SYSTEM#CYGWIN}" != "$UNAME_SYSTEM" ] && UNAME_SYSTEM=Cygwin - [ "${UNAME_SYSTEM#MINGW32}" != "$UNAME_SYSTEM" ] && UNAME_SYSTEM=Mingw - UNAME_MACHINE=`uname -m` - if [ -n "$NULIB_CHROOT" ]; then - # Dans un chroot, il est possible de forcer les valeurs - [ -n "$NULIB_UNAME_SYSTEM" ] && eval "UNAME_SYSTEM=$NULIB_UNAME_SYSTEM" - [ -n "$NULIB_UNAME_MACHINE" ] && eval "UNAME_MACHINE=$NULIB_UNAME_MACHINE" - fi - - # Nom d'hôte respectivement avec et sans domaine - # contrairement à $HOSTNAME, cette valeur peut être spécifiée, comme par ruinst - [ -n "$MYHOST" ] || MYHOST="$HOSTNAME" - [ -n "$MYHOSTNAME" ] || MYHOSTNAME="${HOSTNAME%%.*}" - export MYHOST MYHOSTNAME -fi diff --git a/lib/nulib/bash/base.io b/lib/nulib/bash/base.io deleted file mode 100644 index e274f27..0000000 --- a/lib/nulib/bash/base.io +++ /dev/null @@ -1,1338 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.io base_ "Fonctions de base: affichage et saisie" -require: base.arr - -NULIB__TAB=$'\t' -NULIB__LATIN1=iso-8859-1 -NULIB__LATIN9=iso-8859-15 -NULIB__UTF8=utf-8 -NULIB_IENC="$NULIB__UTF8" -NULIB_OENC="$NULIB__UTF8" - -if [ ! 
-x "$(which iconv 2>/dev/null)" ]; then - function iconv() { cat; } -fi - -function nulib__lang_encoding() { - case "${LANG,,}" in - *@euro) echo "iso-8859-15";; - *.utf-8|*.utf8) echo "utf-8";; - *) echo "iso-8859-1";; - esac -} - -function nulib__norm_encoding() { - local enc="${1,,}" - enc="${enc//[-_]/}" - case "$enc" in - latin|latin1|iso8859|iso88591|8859|88591) echo "iso-8859-1";; - latin9|iso885915|885915) echo "iso-8859-15";; - utf|utf8) echo "utf-8";; - *) echo "$1";; - esac -} - -function nulib__init_encoding() { - local DEFAULT_ENCODING="$(nulib__lang_encoding)" - [ -n "$DEFAULT_ENCODING" ] || DEFAULT_ENCODING=utf-8 - [ -n "$NULIB_OUTPUT_ENCODING" ] || NULIB_OUTPUT_ENCODING="$DEFAULT_ENCODING" - NULIB_OUTPUT_ENCODING="$(nulib__norm_encoding "$NULIB_OUTPUT_ENCODING")" - [ -n "$NULIB_INPUT_ENCODING" ] || NULIB_INPUT_ENCODING="$NULIB_OUTPUT_ENCODING" - NULIB_INPUT_ENCODING="$(nulib__norm_encoding "$NULIB_INPUT_ENCODING")" - [ -n "$NULIB_EDITOR_ENCODING" ] || NULIB_EDITOR_ENCODING="$NULIB_INPUT_ENCODING" - NULIB_EDITOR_ENCODING="$(nulib__norm_encoding "$NULIB_EDITOR_ENCODING")" - - NULIB_IENC="$NULIB_INPUT_ENCODING" - NULIB_OENC="$NULIB_OUTPUT_ENCODING" -} -[ -n "$NULIB_LANG" -a -z "$LANG" ] && export NULIB_LANG LANG="$NULIB_LANG" -nulib__init_encoding - -function nulib_local() { -# Afficher les commandes pour rendre locales certaines variables en fonction des -# arguments: -# - opts rend locale args, pour utiliser parse_opts() à l'intérieur d'une -# fonction. -# - verbosity et interaction rendent respectivement locales NULIB_VERBOSITY et -# NULIB_INTERACTION. Ceci est utile pour pouvoir appeler sans risque de -# pollution de l'environnement une fonction qui utilise parse_opts() avec les -# définitions de PRETTYOPTS. -# Si aucun arguments n'est fourni, toutes les définitions sont affichées. - local arg - [ $# -gt 0 ] || set -- opts verbosity interaction - for arg in "$@"; do - case "$arg" in - parse_opts|opts|o|args) echo "local -a args";; - verbosity|v) echo "local NULIB_VERBOSITY='$NULIB_VERBOSITY'";; - interaction|i) echo "local NULIB_INTERACTION='$NULIB_INTERACTION'";; - esac - done -} - -function noerror() { -# lancer la commande "$@" et masquer son code de retour - [ $# -gt 0 ] || set : - "$@" || return 0 -} - -function noout() { -# lancer la commande "$@" en supprimant sa sortie standard - [ $# -gt 0 ] || return 0 - "$@" >/dev/null -} - -function noerr() { -# lancer la commande "$@" en supprimant sa sortie d'erreur - [ $# -gt 0 ] || return 0 - "$@" 2>/dev/null -} - -function stdredir() { - # Lancer la commande $4..@ en redirigeant stdin depuis $1, stdout vers $2, - # stderr vers $3. Si $1 est vide ou vaut /dev/stdin, la redirection n'est - # pas faite. Si $2 est vide ou vaut /dev/stdout, la redirection n'est pas - # faite. Si $3 est vide ou vaut /dev/stderr, la redirection n'est pas faite. - # Cette fonction existe parce que sur certaines versions de bash, il semble - # que les redirections /dev/std* ne sont pas traitées de façon particulière. - # De plus, sur des technologies telles que OpenVZ, les chemins /dev/std* ne - # sont pas créés (parce que /proc/self/fd/* n'est pas accessible). Donc, - # dans de rares cas où le script tourne sur OpenVZ avec une version de bash - # qui est buggée, la redirection n'est pas faite correctement. 
- local __redirs __in __out __err - if [ -n "$1" -o "$1" == /dev/stdin ]; then - if [ "${1#<}" != "$1" ]; then - __in="${1#<}" - else - __in="$1" - fi - __redirs="$__redirs"' <"$__in"' - fi; shift - if [ -n "$1" -o "$1" == /dev/stdout ]; then - if [ "${1#>>}" != "$1" ]; then - __out="${1#>>}" - __redirs="$__redirs"' >>"$__out"' - elif [ "${1#>}" != "$1" ]; then - __out="${1#>}" - __redirs="$__redirs"' >"$__out"' - else - __out="$1" - __redirs="$__redirs"' >"$__out"' - fi - fi; shift - if [ -n "$1" -o "$1" == /dev/stderr ]; then - if [ "${1#>>}" != "$1" ]; then - __err="${1#>>}" - __redirs="$__redirs"' 2>>"$__err"' - elif [ "${1#>}" != "$1" ]; then - __err="${1#>}" - __redirs="$__redirs"' 2>"$__err"' - else - __err="$1" - __redirs="$__redirs"' 2>"$__err"' - fi - fi; shift - eval '"$@"'"$__redirs" -} - -function isatty() { -# tester si STDOUT n'est pas une redirection - tty -s <&1 -} - -function in_isatty() { -# tester si STDIN n'est pas une redirection - tty -s -} - -function out_isatty() { -# tester si STDOUT n'est pas une redirection. identique à isatty() - tty -s <&1 -} - -function err_isatty() { -# tester si STDERR n'est pas une redirection - tty -s <&2 -} - -################################################################################ -# affichage - -function tooenc() { -# Transformer la valeur $1 de l'encoding $2(=$NULIB_OENC) vers l'encoding de sortie -# $3=($NULIB_OUTPUT_ENCODING) - local src="$1" from="${2:-$NULIB_OENC}" to="${3:-$NULIB_OUTPUT_ENCODING}" - if [ "$from" == "$to" ]; then - recho "$src" - else - iconv -f "$from" -t "$to" <<<"$src" - fi -} - -function uecho() { - tooenc "$*" -} - -function tooenc_() { -# Transformer la valeur $1 de l'encoding $2(=$NULIB_OENC) vers l'encoding de sortie -# $3=($NULIB_OUTPUT_ENCODING) - local src="$1" from="${2:-$NULIB_OENC}" to="${3:-$NULIB_OUTPUT_ENCODING}" - if [ "$from" == "$to" ]; then - recho_ "$src" - else - recho_ "$src" | iconv -f "$from" -t "$to" - fi -} - -function uecho_() { - tooenc_ "$*" -} - -function stooenc() { ### XXX -# Transformer la valeur lue sur stdin de $NULIB_OENC vers l'encoding de sortie par -# défaut ($NULIB_OUTPUT_ENCODING) - local from="${1:-$NULIB_OENC}" to="${2:-$NULIB_OUTPUT_ENCODING}" - if [ "$from" == "$to" ]; then - cat - else - iconv -f "$from" -t "$to" - fi -} - -# faut-il dater les messages de etitle, estep, ebegin? -# Faire NULIB_EDATE=1 en début de script pour activer cette fonctionnalité -export NULIB_EDATE -function __edate() { [ -n "$NULIB_EDATE" ] && date +"[%d/%m/%Y-%H:%M:%S] "; } - -export NULIB_ELOG_OVERWRITE -function __set_no_colors() { :; } -function elogto() { -# Activer NULIB_EDATE et rediriger STDOUT et STDERR vers le fichier $1 -# Si deux fichiers sont spécifiés, rediriger STDOUT vers $1 et STDERR vers $2 -# Si aucun fichier n'est spécifié, ne pas faire de redirection -# Si la redirection est activée, forcer l'utilisation de l'encoding UTF8 -# Si NULIB_ELOG_OVERWRITE=1, alors le fichier en sortie est écrasé. 
Sinon, les -# lignes en sortie lui sont ajoutées - NULIB_EDATE=1 - if [ -n "$1" -a -n "$2" ]; then - LANG=fr_FR.UTF8 - NULIB_OUTPUT_ENCODING="$NULIB__UTF8" - __set_no_colors 1 - if [ -n "$NULIB_ELOG_OVERWRITE" ]; then - exec >"$1" 2>"$2" - else - exec >>"$1" 2>>"$2" - fi - elif [ -n "$1" ]; then - LANG=fr_FR.UTF8 - NULIB_OUTPUT_ENCODING="$NULIB__UTF8" - __set_no_colors 1 - if [ -n "$NULIB_ELOG_OVERWRITE" ]; then - exec >"$1" 2>&1 - else - exec >>"$1" 2>&1 - fi - fi -} - -# variables utilisées pour l'affichage indenté des messages et des titres -# __estack est la liste des invocations de 'ebegin' et 'etitle' en cours -# __tlevel est l'indentation à appliquer avant d'afficher le message -export __estack __tlevel -function __indent() { -# indenter les lignes de $1, sauf la première - if [ "${1/ -/}" != "$1" ]; then - sed "2,\$s/^/${__tlevel}/g" <<<"$1" - else - recho "$1" - fi -} -# fonctions à surcharger pour modifier la façon dont les messages sont affichés -function __eerror() { tooenc "$(__edate)${__tlevel}ERROR $(__indent "$1")"; } -function __ewarn() { tooenc "$(__edate)${__tlevel}WARNING $(__indent "$1")"; } -function __enote() { tooenc "$(__edate)${__tlevel}NOTE $(__indent "$1")"; } -function __ebanner() { - local maxi="${COLUMNS:-80}" - local -a lines - local psfix line - - psfix="$(__edate)${__tlevel}" - while [ ${#psfix} -lt $maxi ]; do psfix="$psfix="; done - - tooenc "$psfix" - maxi=$(($maxi - 1)) - base_array_xsplitl lines "$1" - for line in "" "${lines[@]}" ""; do - line="$(__edate)${__tlevel}= $line" - if [ ${#line} -le $maxi ]; then - while [ ${#line} -lt $maxi ]; do line="$line "; done - line="$line=" - fi - tooenc "$line" - done - tooenc "$psfix" -} -function __eimportant() { tooenc "$(__edate)${__tlevel}IMPORTANT $(__indent "$1")"; } -function __eattention() { tooenc "$(__edate)${__tlevel}ATTENTION $(__indent "$1")"; } -function __einfo() { tooenc "$(__edate)${__tlevel}INFO $(__indent "$1")"; } -function __eecho() { tooenc "$(__edate)${__tlevel}$(__indent "$1")"; } -function __eecho_() { tooenc_ "$(__edate)${__tlevel}$(__indent "$1")"; } -function __edebug() { tooenc "$(__edate)${__tlevel}DEBUG $(__indent "$1")"; } -function __estep() { tooenc "$(__edate)${__tlevel}. $(__indent "$1")"; } -function __estepe() { tooenc "$(__edate)${__tlevel}.E $(__indent "$1")"; } -function __estepw() { tooenc "$(__edate)${__tlevel}.W $(__indent "$1")"; } -function __estepn() { tooenc "$(__edate)${__tlevel}.N $(__indent "$1")"; } -function __estepi() { tooenc "$(__edate)${__tlevel}.I $(__indent "$1")"; } -function __estep_() { tooenc_ "$(__edate)${__tlevel}. $(__indent "$1")"; } -function __estepe_() { tooenc_ "$(__edate)${__tlevel}.E $(__indent "$1")"; } -function __estepw_() { tooenc_ "$(__edate)${__tlevel}.W $(__indent "$1")"; } -function __estepn_() { tooenc_ "$(__edate)${__tlevel}.N $(__indent "$1")"; } -function __estepi_() { tooenc_ "$(__edate)${__tlevel}.I $(__indent "$1")"; } -function __etitle() { tooenc "$(__edate)${__tlevel}=== $(__indent "$1")"; } -function __ebegin() { tooenc_ "$(__edate)${__tlevel}. 
$(__indent "$1"): "; } -function __edoto() { echo_ "."; } -function __edotw() { echo_ "w"; } -function __edotx() { echo_ "x"; } -function __edotp() { echo_ "+"; } -function __edotd() { tooenc "($1)"; } -function __eendo() { echo "[ok]"; } -function __eendx() { echo "[error]"; } -PRETTYOPTS=() -function set_verbosity() { :;} -function set_interaction() { :;} -function show_error() { -# tester respectivement si on doit afficher les messages d'erreur, -# d'avertissement, d'information, de debug - return 0 -} -function show_warn() { - return 0 -} -function show_info() { - return 0 -} -function show_verbose() { - return 0 -} -function show_debug() { - [ -n "$DEBUG" ] -} -function check_verbosity() { - return 0 -} -function get_verbosity_option() { :;} -function check_interaction() { - return 0 -} - -# note: toutes les fonctions d'affichage e* écrivent sur stderr -__epending= -function eflush() { -# Afficher les messages en attente - if [ -n "$__epending" ]; then recho "$__epending" 1>&2; __epending=; fi -} -function eclearp() { -# Supprimer les message en attente - __epending= -} -function eerror() { -# Afficher un message d'erreur - show_error || return; eflush; __eerror "$*" 1>&2 -} - -function die() { - [ $# -gt 0 ] && base_eerror "$@" - exit 1 -} - -function exit_with { - if [ $# -gt 0 ]; then "$@"; fi - exit $? -} - -function die_with { - [ $# -gt 0 ] && base_eerror "$1" - shift - [ $# -gt 0 ] && "$@" - exit 1 -} - -function die_unless() { - # Afficher $1 et quitter le script avec die() si la commande $2..@ retourne FAUX - local du__r - local du__msg="$1"; shift - if [ $# -eq 0 ]; then - [ -n "$du__msg" ] && base__eerror "$du__msg" - exit 1 - elif "$@"; then - : - else - du__r=$? - [ -n "$du__msg" ] && base__eerror "$du__msg" - exit $du__r - fi - return 0 -} - -function eerror_unless() { - # Afficher $1 avec base_eerror() si la commande $2..@ retourne FAUX. dans tous les cas, retourner le code de retour de la commande. - local eu__r - local eu__msg="$1"; shift - if [ $# -eq 0 ]; then - [ -n "$eu__msg" ] && base__eerror "$eu__msg" - return 1 - elif "$@"; then - : - else - eu__r=$? - [ -n "$eu__msg" ] && base__eerror "$eu__msg" - return $eu__r - fi - return 0 -} - -function die_if() { - # Afficher $1 et quitter le script avec die() si la commande $2..@ retourne VRAI. sinon, retourner le code de retour de la commande - local di__r=0 - local di__msg="$1"; shift - [ $# -eq 0 ] && return 0 - if "$@"; then - [ -n "$di__msg" ] && base__eerror "$di__msg" - exit 0 - else - di__r=$? - fi - return $di__r -} - -function eerror_if() { - # Afficher $1 avec base_eerror() si la commande $2..@ retourne VRAI. dans tous les cas, retourner le code de retour de la commande. - local ei__r=0 - local ei__msg="$1"; shift - [ $# -eq 0 ] && return 0 - if "$@"; then - [ -n "$ei__msg" ] && base__eerror "$ei__msg" - else - ei__r=$? 
- fi - return $ei__r -} - -function ewarn() { -# Afficher un message d'avertissement - show_warn || return; eflush; __ewarn "$*" 1>&2 -} -function enote() { -# Afficher un message d'information de même niveau qu'un avertissement - show_info || return; eflush; __enote "$*" 1>&2 -} -function ebanner() { -# Afficher un message très important encadré, puis attendre 5 secondes - show_error || return; eflush; __ebanner "$*" 1>&2; sleep 5 -} -function eimportant() { -# Afficher un message très important - show_error || return; eflush; __eimportant "$*" 1>&2 -} -function eattention() { -# Afficher un message important - show_warn || return; eflush; __eattention "$*" 1>&2 -} -function einfo() { -# Afficher un message d'information - show_info || return; eflush; __einfo "$*" 1>&2 -} -function eecho() { -# Afficher un message d'information sans préfixe - show_info || return; eflush; __eecho "$*" 1>&2 -} -function eecho_() { - show_info || return; eflush; __eecho_ "$*" 1>&2 -} -function edebug() { -# Afficher un message de debug - show_debug || return; eflush; __edebug "$*" 1>&2 -} -function trace() { -# Afficher la commande $1..@, la lancer, puis afficher son code d'erreur si une -# erreur se produit - local r cmd="$(qvals "$@")" - show_info && { eflush; __eecho "\$ $cmd" 1>&2; } - "$@"; r=$? - if [ $r -ne 0 ]; then - if show_info; then - eflush; __eecho "^ [EC #$r]" 1>&2 - elif show_error; then - eflush; __eecho "^ $cmd [EC #$r]" 1>&2; - fi - fi - return $r -} -function trace_error() { -# Lancer la commande $1..@, puis afficher son code d'erreur si une erreur se -# produit. La différence avec trace() est que la commande n'est affichée que si -# une erreur se produit. - local r - "$@"; r=$? - if [ $r -ne 0 ]; then - local cmd="$(qvals "$@")" - show_error && { eflush; __eecho "^ $cmd [EC #$r]" 1>&2; } - fi - return $r -} - -function etitle() { -# Afficher le titre $1, qui est le début éventuel d'une section. Les section -# imbriquées sont affichées indentées. La section n'est pas terminée, et il faut -# la terminer explicitement avec eend, sauf dans certains cas précis: -# - Si $2..$* est spécifié, c'est une commande. Lancer la commande dans le -# contexte de la section. Puis, la section est automatiquement terminée sauf si -# l'option -s est spécifiée, auquel cas la section reste ouverte. Si l'option -p -# est spécifiée, eclearp() est appelé pour purger les messages en attente -# - Dans le cas contraire, l'option -s est ignorée: la section doit toujours -# être terminée explicitement. -# La fonction etitled() est comme etitle(), mais le titre n'est pas affiché -# immédiatement. L'affichage effectif est effectué dès qu'une fonction e* est -# utilisée. Ceci permet, avec la fonction eclearp(), de ne pas afficher de titre -# pour une section vide. 
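# Editorial sketch (not part of the deleted file): typical nesting of
# etitle/estep/eend as described above; messages and commands are illustrative.
#   etitle "Deploying the application"
#     estep "fetching sources"
#     estep "building"
#   eend
#   etitle -s "Section kept open" some_command   # -s: the command runs, the section stays open
#   eend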
- local __t_deferred= - __t_etitle "$@" -} -function etitled() { - local __t_deferred=1 - __t_etitle "$@" -} -function __t_etitle() { - local __t_eend=default - local __t_clearp= - while [ -n "$1" ]; do - if [ "$1" == "--" ]; then - shift - break - elif [ "$1" == "-s" ]; then - __t_eend= - shift - elif [ "$1" == "--eend" ]; then - __t_eend=1 - shift - elif [ "$1" == "-p" ]; then - __t_clearp=1 - shift - else - break - fi - done - local __t_title="$1"; shift - local __t_s=0 - # etitle - [ -n "$__estack" ] && __tlevel="${__tlevel} " - __estack="$__estack:t" - if show_info; then - if [ -n "$__t_deferred" ]; then - __epending="${__epending:+$__epending -}$(__etitle "$__t_title")" - else - eflush - __etitle "$__t_title" 1>&2 - fi - fi - # commande - if [ $# -gt 0 ]; then - "$@" - __t_s=$? - [ "$__t_eend" == "default" ] && __t_eend=1 - fi - # eend - [ "$__t_eend" == "default" ] && __t_eend= - if [ -n "$__t_eend" ]; then - eend $__t_s - [ -n "$__t_clearp" ] && eclearp - fi - return $__t_s -} -function estep() { -# Afficher la description d'une opération. Cette fonction est particulièrement -# appropriée dans le contexte d'un etitle. -# Les variantes e (error), w (warning), n (note), i (info) permettent d'afficher -# des couleurs différentes, mais toutes sont du niveau info. - show_info || return; eflush; __estep "$*" 1>&2 -} -function estepe() { - show_info || return; eflush; __estepe "$*" 1>&2 -} -function estepw() { - show_info || return; eflush; __estepw "$*" 1>&2 -} -function estepn() { - show_info || return; eflush; __estepn "$*" 1>&2 -} -function estepi() { - show_info || return; eflush; __estepi "$*" 1>&2 -} -function estep_() { - show_info || return; eflush; __estep_ "$*" 1>&2 -} -function estepe_() { - show_info || return; eflush; __estepe_ "$*" 1>&2 -} -function estepw_() { - show_info || return; eflush; __estepw_ "$*" 1>&2 -} -function estepn_() { - show_info || return; eflush; __estepn_ "$*" 1>&2 -} -function estepi_() { - show_info || return; eflush; __estepi_ "$*" 1>&2 -} -function ebegin() { -# Afficher le message $1, qui décrit le début d'une opération. Cette fonction -# débute une section, qu'il faut terminer avec eend. -# Si $2..$* est spécifié, c'est une commande. Lancer la commande dans le -# contexte de la section. Puis, la section est terminée automatiquement, sauf si -# l'option -s est spécifiée, auquel cas la section reste ouverte. - local __b_eend=default - while [ -n "$1" ]; do - if [ "$1" == "--" ]; then - shift - break - elif [ "$1" == "-s" ]; then - __b_eend= - shift - elif [ "$1" == "--eend" ]; then - __b_eend=1 - shift - else - break - fi - done - local __b_msg="$1"; shift - local __b_s=0 - # ebegin - __estack="$__estack:b" - if show_info; then - eflush - __ebegin "$__b_msg" 1>&2 - fi - # commande - if [ $# -gt 0 ]; then - "$@" - __b_s=$? - [ "$__b_eend" == "default" ] && __b_eend=1 - fi - # eend - [ "$__b_eend" == "default" ] && __b_eend= - [ -n "$__b_eend" ] && eend $__b_s - return $__b_s -} -function edot() { -# Afficher une étape d'une opération, matérialisée par un point '.' ou une -# croix 'x' en cas de succès ou d'erreur. Cette fonction est particulièrement -# appropriée dans le contexte d'un ebegin. - local s=$? - show_info || return - eflush - [ -n "$1" ] && s="$1" - shift - if [ "$s" == "0" ]; then - __edoto 1>&2 - else - __edotx 1>&2 - fi - show_verbose && [ $# -gt 0 ] && __edotd "$*" 1>&2 - return $s -} -function edotw() { -# Afficher un avertissement comme étape d'une opération, matérialisée par une -# lettre 'w' (typiquement de couleur jaune). 
Cette fonction est particulièrement -# appropriée dans le contexte d'un ebegin. - local s=$? - show_info || return - eflush - [ -n "$1" ] && s="$1" - shift - __edotw 1>&2 - show_verbose && [ $# -gt 0 ] && __edotd "$*" 1>&2 - return $s -} -function ewait() { -# Afficher les étapes d'une opération qui dure, matérialisées par des '+' toutes -# les secondes tant que le processus $1 tourne. -# A utiliser de cette manière: -# ebegin "msg" -# cmd & -# ewait $! -# eend - [ -n "$1" ] || return 1 - if show_info; then - local count=2 - eflush - little_sleep # certains processus retournent tout de suite - while is_running "$1"; do - sleep 1 - if [ $count -gt 0 ]; then - # attendre 2 secondes avant de commencer à afficher des '+' - count=$(($count - 1)) - else - __edotp 1>&2 - fi - done - # terminer par un '.' - __edoto 1>&2 - else - # ne rien afficher, mais attendre quand même la fin de l'opération - wait "$1" - fi -} -function eend() { -# Terminer une section. -# Avec l'option -c, remettre à zéro toutes les informations de section -# Si la section en cours est un ebegin, afficher la fin de l'opération: [ok] ou -# [error] en fonction du code de retour de la dernière commande (ou de $1 si -# cette valeur est donnée) -# Si la section en cours est un etitle, marquer la fin de la section concernée -# par le titre. - local s=$? - if [ "$1" == "-c" ]; then - __estack= - __tlevel= - elif [ "${__estack%:b}" != "$__estack" ]; then - # terminer ebegin - __estack="${__estack%:b}" - show_info || return - eflush - [ -n "$1" ] && s="$1" - if [ "$s" == "0" ]; then - __eendo 1>&2 - else - __eendx 1>&2 - fi - elif [ "${__estack%:t}" != "$__estack" ]; then - # terminer etitle -s - __estack="${__estack%:t}" - __tlevel="${__tlevel% }" - fi -} -function __elinedots() { - ebegin "$1" - local line - if show_debug; then - while read line; do - __edoto 1>&2 - __edotd "$line" 1>&2 - done - else - while read line; do - __edoto 1>&2 - done - fi - eend -} -function elinedots() { -# Afficher un message comme avec ebegin "$1", puis afficher un point '.' pour -# chaque ligne lue sur stdin. Cela permet de suivre une opération. En mode -# DEBUG, afficher la ligne affichée plutôt qu'un point. -# Si $2..$* sont spécifiés, lancer la commande et suivre sa sortie. Ainsi, -# 'elinedots msg cmd args' est un raccourci pour 'cmd args | elinedots msg' - local msg="$1"; shift - if [ $# -gt 0 ]; then - "$@" | __elinedots "$msg" - else - __elinedots "$msg" - fi -} - -################################################################################ -# saisie - -function toienc() { -# Transformer la valeur de la variable $1 de l'encoding d'entrée -# $3(=$NULIB_INPUT_ENCODING) vers l'encoding $2(=$NULIB_IENC) - local __tie_var="$1" __tie_to="${2:-$NULIB_IENC}" __tie_from="${3:-$NULIB_INPUT_ENCODING}" - if [ "$__tie_from" != "$__tie_to" ]; then - _setv "$__tie_var" "$(iconv -f "$__tie_from" -t "$__tie_to" <<<"${!__tie_var}")" - fi -} - -function uread() { -# Lire une valeur sur stdin et la placer dans la variable $1. 
On assume que la -# valeur en entrée est encodée dans l'encoding d'entrée par défaut - [ $# -gt 0 ] || set -- REPLY - local __r_var - read "$@" - for __r_var in "$@"; do - [ -z "$__r_var" -o "${__r_var:0:1}" == "-" ] && continue # ignorer les options - toienc "$__r_var" - done -} - -function stoienc() { ### XXX -# Transformer la valeur lue sur stdin de $NULIB_IENC vers l'encoding d'entrée par -# défaut ($NULIB_INPUT_ENCODING) - local to="${1:-$NULIB_IENC}" from="${2:-$NULIB_INPUT_ENCODING}" - if [ "$from" == "$to" ]; then - cat - else - iconv -f "$from" -t "$to" - fi -} - - - -function is_interaction() { - return 1 -} - -function get_interaction_option() { :;} - -function ask_yesno() { -# Afficher le message $1 suivi de [oN] ou [On] suivant que $2 vaut O ou N, puis -# lire la réponse. Retourner 0 si la réponse est vrai, 1 sinon. -# Si $1 est une option, elle est utilisée avec check_interaction pour savoir si -# on est en mode interactif ou non. A ce moment-là, les valeurs sont décalées -# ($2=message, $3=default) -# Si $2 vaut C, la valeur par défaut est N si on est interactif, O sinon -# Si $2 vaut X, la valeur par défaut est O si on est interactif, N sinon - local interactive=1 - if [[ "$1" == -* ]]; then - if [ "$1" != -- ]; then - check_interaction "$1" || interactive= - fi - shift - else - check_interaction -c || interactive= - fi - local default="${2:-N}" - if [ "$default" == "C" ]; then - [ -n "$interactive" ] && default=N || default=O - elif [ "$default" == "X" ]; then - [ -n "$interactive" ] && default=O || default=N - fi - if [ -n "$interactive" ]; then - eflush - local message="$1" - local prompt="[oN]" - local r - is_yes "$default" && prompt="[On]" - if [ -n "$message" ]; then - __eecho_ "$message" 1>&2 - else - NULIB_OENC="$NULIB__UTF8" __eecho_ "Voulez-vous continuer?" 1>&2 - fi - NULIB_OENC="$NULIB__UTF8" tooenc_ " $prompt " 1>&2 - uread r - is_yes "${r:-$default}" - else - is_yes "$default" - fi -} - -function ask_any() { -# Afficher le message $1 suivi du texte "[$2]" (qui vaut par défaut +Oq), puis -# lire la réponse. Les lettres de la chaine de format $2 sont numérotées de 0 à -# $((${#2} - 1)). Le code de retour est le numéro de la lettre qui a été -# sélectionnée. Cette fonction est une généralisation de ask_yesno() pour -# n'importe quel ensemble de lettres. -# La première lettre en majuscule est la lettre sélectionnée par défaut. -# La lettre O matche toutes les lettres qui signifient oui: o, y, 1, v, t -# La lettre N matche toutes les lettres qui signifient non: n, f, 0 -# Il y a des raccourcis: -# +O --> On -# +N --> oN -# +C --> oN si on est en mode interactif, On sinon -# +X --> On si on est en mode interactifn oN sinon -# Si $1 est une option, elle est utilisée avec check_interaction pour savoir si -# on est en mode interactif ou non. 
A ce moment-là, les valeurs sont décalées -# ($2=message, $3=format) - local interactive=1 - if [[ "$1" == -* ]]; then - if [ "$1" != -- ]; then - check_interaction "$1" || interactive= - fi - shift - else - check_interaction -c || interactive= - fi - local format="${2:-+Oq}" - format="${format/+O/On}" - format="${format/+N/oN}" - if [ -n "$interactive" ]; then - format="${format/+C/oN}" - format="${format/+X/On}" - else - format="${format/+C/On}" - format="${format/+X/oN}" - fi - local i count="${#format}" - - if [ -n "$interactive" ]; then - eflush - local message="${1:-Voulez-vous continuer?}" - local prompt="[$format]" - local r f lf defi - while true; do - __eecho_ "$message $prompt " 1>&2 - uread r - r="$(strlower "${r:0:1}")" - i=0; defi= - while [ $i -lt $count ]; do - f="${format:$i:1}" - lf="$(strlower "$f")" - [ "$r" == "$lf" ] && return $i - if [ -z "$defi" ]; then - [ -z "${f/[A-Z]/}" ] && defi="$i" - fi - if [ "$lf" == o ]; then - case "$r" in o|y|1|v|t) return $i;; esac - elif [ "$lf" == n ]; then - case "$r" in n|f|0) return $i;; esac - fi - i=$(($i + 1)) - done - [ -z "$r" ] && return ${defi:-0} - done - else - i=0 - while [ $i -lt $count ]; do - f="${format:$i:1}" - [ -z "${f/[A-Z]/}" ] && return $i - i=$(($i + 1)) - done - return 0 - fi -} - -function read_value() { -# Afficher le message $1 suivi de la valeur par défaut [$3] si elle est non -# vide, puis lire la valeur donnée par l'utilisateur. Cette valeur doit être non -# vide si $4(=O) est vrai. La valeur saisie est placée dans la variable -# $2(=value) -# Si $1 est une option, elle est utilisée avec check_interaction pour savoir si -# on est en mode interactif ou non. A ce moment-là, les valeurs sont décalées -# ($2=message, $3=variable, $4=default, $5=required) -# En mode non interactif, c'est la valeur par défaut qui est sélectionnée. Si -# l'utilisateur requière que la valeur soit non vide et que la valeur par défaut -# est vide, afficher un message d'erreur et retourner faux -# read_password() est comme read_value(), mais la valeur saisie n'est pas -# affichée, ce qui la rend appropriée pour la lecture d'un mot de passe. 
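# Editorial sketch (not part of the deleted file): prompting as documented
# above ($1 message, $2 target variable, $3 default, $4 required); the prompt
# text, variable names and default below are illustrative only.
#   read_value    "Server name" server "localhost" O   # required, pre-filled with "localhost"
#   read_password "Password"    passwd ""          O   # same prompt logic, input not echoed
#   uecho "connecting to $server"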
- local -a __rv_opts __rv_readline=1 __rv_showdef=1 __rv_nl= - __rv_opts=() - [ -n "$NULIB_NO_READLINE" ] && __rv_readline= - __rv_read "$@" -} - -function read_password() { - local -a __rv_opts __rv_readline= __rv_showdef= __rv_nl=1 - __rv_opts=(-s) - __rv_read "$@" -} - -function __rv_read() { - local __rv_int=1 - if [[ "$1" == -* ]]; then - if [ "$1" != -- ]; then - check_interaction "$1" || __rv_int= - fi - shift - else - check_interaction -c || __rv_int= - fi - local __rv_msg="$1" __rv_v="${2:-value}" __rv_d="$3" __rv_re="${4:-O}" - if [ -z "$__rv_int" ]; then - # En mode non interactif, retourner la valeur par défaut - if is_yes "$__rv_re" && [ -z "$__rv_d" ]; then - NULIB_OENC="$NULIB__UTF8" eerror "La valeur par défaut de $__rv_v doit être non vide" - return 1 - fi - _setv "$__rv_v" "$__rv_d" - return 0 - fi - - eflush - local __rv_r - while true; do - if [ -n "$__rv_msg" ]; then - __eecho_ "$__rv_msg" 1>&2 - else - NULIB_OENC="$NULIB__UTF8" __eecho_ "Entrez la valeur" 1>&2 - fi - if [ -n "$__rv_readline" ]; then - NULIB_OENC="$NULIB__UTF8" tooenc_ ": " 1>&2 - uread -e ${__rv_d:+-i"$__rv_d"} "${__rv_opts[@]}" __rv_r - else - if [ -n "$__rv_d" ]; then - if [ -n "$__rv_showdef" ]; then - tooenc_ " [$__rv_d]" 1>&2 - else - tooenc_ " [****]" 1>&2 - fi - fi - NULIB_OENC="$NULIB__UTF8" tooenc_ ": " 1>&2 - uread "${__rv_opts[@]}" __rv_r - [ -n "$__rv_nl" ] && echo - fi - __rv_r="${__rv_r:-$__rv_d}" - if [ -n "$__rv_r" ] || ! is_yes "$__rv_re"; then - _setv "$__rv_v" "$__rv_r" - return 0 - fi - done -} - -function simple_menu() { -# Afficher un menu simple dont les éléments sont les valeurs du tableau -# $2(=options). L'option choisie est placée dans la variable $1(=option) -# -t TITLE: spécifier le titre du menu -# -m YOUR_CHOICE: spécifier le message d'invite pour la sélection de l'option -# -d DEFAULT: spécifier l'option par défaut. 
Par défaut, prendre la valeur -# actuelle de la variable $1(=option) - local __sm_title= __sm_yourchoice= __sm_default= - local -a __sm_args - parse_opts -t: __sm_title= -m: __sm_yourchoice= -d: __sm_default= @ __sm_args -- "$@" && - set -- "${__sm_args[@]}" || ewarn "$__sm_args" - - local __sm_option_var="${1:-option}" __sm_options_var="${2:-options}" - local __sm_option __sm_options - __sm_options="$__sm_options_var[*]" - if [ -z "${!__sm_options}" ]; then - NULIB_OENC="$NULIB__UTF8" eerror "Le tableau $__sm_options_var doit être non vide" - return 1 - fi - [ -z "$__sm_default" ] && __sm_default="${!__sm_option_var}" - - eflush - base_array_copy __sm_options "$__sm_options_var" - local __sm_c=0 __sm_i __sm_choice - while true; do - if [ "$__sm_c" == "0" ]; then - # Afficher le menu - [ -n "$__sm_title" ] && __eecho "=== $__sm_title ===" 1>&2 - __sm_i=1 - for __sm_option in "${__sm_options[@]}"; do - if [ "$__sm_option" == "$__sm_default" ]; then - __eecho "$__sm_i*- $__sm_option" 1>&2 - else - __eecho "$__sm_i - $__sm_option" 1>&2 - fi - let __sm_i=$__sm_i+1 - done - fi - - # Afficher les choix - if [ -n "$__sm_yourchoice" ]; then - __eecho_ "$__sm_yourchoice" 1>&2 - else - NULIB_OENC="$NULIB__UTF8" __eecho_ "Entrez le numéro de l'option choisie" 1>&2 - fi - NULIB_OENC="$NULIB__UTF8" tooenc_ ": " 1>&2 - uread __sm_choice - - # Valeur par défaut - if [ -z "$__sm_choice" -a -n "$__sm_default" ]; then - __sm_option="$__sm_default" - break - fi - # Vérifier la saisie - if [ -n "$__sm_choice" -a -z "${__sm_choice//[0-9]/}" ]; then - if [ "$__sm_choice" -gt 0 -a "$__sm_choice" -le "${#__sm_options[*]}" ]; then - __sm_option="${__sm_options[$(($__sm_choice - 1))]}" - break - else - NULIB_OENC="$NULIB__UTF8" eerror "Numéro d'option incorrect" - fi - else - NULIB_OENC="$NULIB__UTF8" eerror "Vous devez saisir le numéro de l'option choisie" - fi - - let __sm_c=$__sm_c+1 - if [ "$__sm_c" -eq 5 ]; then - # sauter une ligne toutes les 4 tentatives - NULIB_OENC="$NULIB__UTF8" tooenc "" 1>&2 - __sm_c=0 - fi - done - _setv "$__sm_option_var" "$__sm_option" -} - -function actions_menu() { -# Afficher un menu dont les éléments sont les valeurs du tableau $4(=options), -# et une liste d'actions tirées du tableau $3(=actions). L'option choisie est -# placée dans la variable $2(=option). L'action choisie est placée dans la -# variable $1(=action) -# Un choix est saisi sous la forme [action]num_option -# -t TITLE: spécifier le titre du menu -# -m OPT_YOUR_CHOICE: spécifier le message d'invite pour la sélection de -# l'action et de l'option -# -M ACT_YOUR_CHOICE: spécifier le message d'invite dans le cas où aucune option -# n'est disponible. Dans ce cas, seules les actions vides sont possibles. -# -e VOID_ACTION: spécifier qu'une action est vide, c'est à dire qu'elle ne -# requière pas d'être associée à une option. Par défaut, la dernière action -# est classée dans cette catégorie puisque c'est l'action "quitter" -# -d DEFAULT_ACTION: choisir l'action par défaut. par défaut, c'est la première -# action. -# -q QUIT_ACTION: choisir l'option "quitter" qui provoque la sortie du menu sans -# choix. par défaut, c'est la dernière action. -# -o DEFAULT_OPTION: choisir l'option par défaut. 
par défaut, prendre la valeur -# actuelle de la variable $2(=option) - local -a __am_action_descs __am_options __am_void_actions - local __am_tmp __am_select_action __am_select_option __am_title __am_optyc __am_actyc - local __am_default_action=auto __am_quit_action=auto - local __am_default_option= - local -a __am_args - parse_opts \ - -t: __am_title= \ - -m: __am_optyc= \ - -M: __am_actyc= \ - -e: __am_void_actions \ - -d: __am_default_action= \ - -q: __am_quit_action= \ - -o: __am_default_option= \ - @ __am_args -- "$@" && set -- "${__am_args[@]}" || { eerror "$__am_args"; return 1; } - - __am_tmp="${1:-action}"; __am_select_action="${!__am_tmp}" - __am_tmp="${2:-option}"; __am_select_option="${!__am_tmp}" - [ -n "$__am_default_option" ] && __am_select_option="$__am_default_option" - base_array_copy __am_action_descs "${3:-actions}" - base_array_copy __am_options "${4:-options}" - - eerror_unless [ ${#__am_action_descs[*]} -gt 0 ] "Vous devez spécifier le tableau des actions" || return - __actions_menu || return 1 - _setv "${1:-action}" "$__am_select_action" - _setv "${2:-option}" "$__am_select_option" -} - -function __actions_menu() { - local title="$__am_title" - local optyc="$__am_optyc" actyc="$__am_actyc" - local default_action="$__am_default_action" - local quit_action="$__am_quit_action" - local select_action="$__am_select_action" - local select_option="$__am_select_option" - local -a action_descs options void_actions - base_array_copy action_descs __am_action_descs - base_array_copy options __am_options - base_array_copy void_actions __am_void_actions - - # Calculer la liste des actions valides - local no_options - base_array_isempty options && no_options=1 - - local -a actions - local tmp action name - for tmp in "${action_descs[@]}"; do - splitfsep2 "$tmp" : action name - [ -n "$action" ] || action="${name:0:1}" - action="$(strlower "$action")" - base_array_addu actions "$action" - done - - # Calculer l'action par défaut - if [ "$default_action" == auto ]; then - # si action par défaut non spécifiée, alors prendre la première action - default_action="$select_action" - if [ -n "$default_action" ]; then - base_array_contains actions "$default_action" || default_action= - fi - [ -n "$default_action" ] || default_action="${actions[0]}" - fi - default_action="${default_action:0:1}" - default_action="$(strlower "$default_action")" - - # Calculer l'action quitter par défaut - if [ "$quit_action" == auto ]; then - # si action par défaut non spécifiée, alors prendre la dernière action, - # s'il y a au moins 2 actions - if [ ${#actions[*]} -gt 1 ]; then - quit_action="${actions[@]:$((-1)):1}" - base_array_addu void_actions "$quit_action" - fi - fi - quit_action="${quit_action:0:1}" - quit_action="$(strlower "$quit_action")" - - # Calculer la ligne des actions à afficher - local action_title - for tmp in "${action_descs[@]}"; do - splitfsep2 "$tmp" : action name - [ -n "$action" ] || action="${name:0:1}" - [ -n "$name" ] || name="$action" - action="$(strlower "$action")" - if [ -n "$no_options" ]; then - if ! 
base_array_contains void_actions "$action"; then - base_array_del actions "$action" - continue - fi - fi - [ "$action" == "$default_action" ] && name="$name*" - action_title="${action_title:+$action_title/}$name" - done - if [ -n "$default_action" ]; then - # si action par défaut invalide, alors pas d'action par défaut - base_array_contains actions "$default_action" || default_action= - fi - if [ -n "$quit_action" ]; then - # si action quitter invalide, alors pas d'action quitter - base_array_contains actions "$quit_action" || quit_action= - fi - - # Type de menu - if [ -n "$no_options" ]; then - if base_array_isempty void_actions; then - eerror "Aucune option n'est définie. Il faut définir le tableau des actions vides" - return 1 - fi - __void_actions_menu - else - __options_actions_menu - fi -} - -function __void_actions_menu() { - eflush - local c=0 choice - while true; do - if [ $c -eq 0 ]; then - [ -n "$title" ] && __etitle "$title" 1>&2 - __eecho_ "=== Actions disponibles: " 1>&2 - tooenc "$action_title" 1>&2 - fi - if [ -n "$actyc" ]; then - __eecho_ "$actyc" 1>&2 - elif [ -n "$optyc" ]; then - __eecho_ "$optyc" 1>&2 - else - __eecho_ "Entrez l'action à effectuer" 1>&2 - fi - tooenc_ ": " 1>&2 - uread choice - if [ -z "$choice" -a -n "$default_action" ]; then - select_action="$default_action" - break - fi - - # vérifier la saisie - choice="${choice:0:1}" - choice="$(strlower "$choice")" - if base_array_contains actions "$choice"; then - select_action="$choice" - break - elif [ -n "$choice" ]; then - eerror "$choice: action incorrecte" - else - eerror "vous devez saisir l'action à effectuer" - fi - let c=$c+1 - if [ $c -eq 5 ]; then - # sauter une ligne toutes les 4 tentatives - tooenc "" 1>&2 - c=0 - fi - done - __am_select_action="$select_action" - __am_select_option= -} - -function __options_actions_menu() { - eflush - local c=0 option choice action option - while true; do - if [ $c -eq 0 ]; then - [ -n "$title" ] && __etitle "$title" 1>&2 - i=1 - for option in "${options[@]}"; do - if [ "$option" == "$select_option" ]; then - tooenc "$i*- $option" 1>&2 - else - tooenc "$i - $option" 1>&2 - fi - let i=$i+1 - done - __estepn_ "Actions disponibles: " 1>&2 - tooenc "$action_title" 1>&2 - fi - if [ -n "$optyc" ]; then - __eecho_ "$optyc" 1>&2 - else - __eecho_ "Entrez l'action et le numéro de l'option choisie" 1>&2 - fi - tooenc_ ": " 1>&2 - uread choice - - # vérifier la saisie - if [ -z "$choice" -a -n "$default_action" ]; then - action="$default_action" - if base_array_contains void_actions "$action"; then - select_action="$action" - select_option= - break - elif [ -n "$select_option" ]; then - select_action="$action" - break - fi - fi - action="${choice:0:1}" - action="$(strlower "$action")" - if base_array_contains actions "$action"; then - # on commence par un code d'action valide. 
cool :-) - if base_array_contains void_actions "$action"; then - select_action="$action" - select_option= - break - else - option="${choice:1}" - option="${option// /}" - if [ -z "$option" -a -n "$select_option" ]; then - select_action="$action" - break - elif [ -z "$option" ]; then - eerror "vous devez saisir le numéro de l'option" - elif isnum "$option"; then - if [ $option -gt 0 -a $option -le ${#options[*]} ]; then - select_action="$action" - select_option="${options[$(($option - 1))]}" - break - fi - else - eerror "$option: numéro d'option incorrecte" - fi - fi - elif isnum "$choice"; then - # on a simplement donné un numéro d'option - action="$default_action" - if [ -n "$action" ]; then - if base_array_contains void_actions "$action"; then - select_action="$action" - select_option= - break - else - option="${choice// /}" - if [ -z "$option" ]; then - eerror "vous devez saisir le numéro de l'option" - elif isnum "$option"; then - if [ $option -gt 0 -a $option -le ${#options[*]} ]; then - select_action="$action" - select_option="${options[$(($option - 1))]}" - break - fi - else - eerror "$option: numéro d'option incorrecte" - fi - fi - else - eerror "Vous devez spécifier l'action à effectuer" - fi - elif [ -n "$choice" ]; then - eerror "$choice: action et/ou option incorrecte" - else - eerror "vous devez saisir l'action à effectuer" - fi - let c=$c+1 - if [ $c -eq 5 ]; then - # sauter une ligne toutes les 4 tentatives - tooenc "" 1>&2 - c=0 - fi - done - __am_select_action="$select_action" - __am_select_option="$select_option" -} diff --git a/lib/nulib/bash/base.path b/lib/nulib/bash/base.path deleted file mode 100644 index 0965347..0000000 --- a/lib/nulib/bash/base.path +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.path base_ "Fonctions de base: gestion des chemins et des fichiers" -require: base.core - -function: base_in_path "tester l'existence d'un programme dans le PATH" -function base_in_path() { - [ -n "$1" -a -x "$(which "$1" 2>/dev/null)" ] -} - -function: base_delpath "supprimer le chemin \$1 de \$2(=PATH)" -function base_delpath() { - local _qdir="${1//\//\\/}" - eval "export ${2:-PATH}; ${2:-PATH}"'="${'"${2:-PATH}"'#$1:}"; '"${2:-PATH}"'="${'"${2:-PATH}"'%:$1}"; '"${2:-PATH}"'="${'"${2:-PATH}"'//:$_qdir:/:}"; [ "$'"${2:-PATH}"'" == "$1" ] && '"${2:-PATH}"'=' -} - -function: base_addpath "Ajouter le chemin \$1 à la fin, dans \$2(=PATH), s'il n'y existe pas déjà" -function base_addpath() { - local _qdir="${1//\//\\/}" - eval "export ${2:-PATH}; "'[ "${'"${2:-PATH}"'#$1:}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'%:$1}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'//:$_qdir:/:}" == "$'"${2:-PATH}"'" -a "$'"${2:-PATH}"'" != "$1" ] && '"${2:-PATH}"'="${'"${2:-PATH}"':+$'"${2:-PATH}"':}$1"' -} - -function: base_inspathm "Ajouter le chemin \$1 au début, dans \$2(=PATH), s'il n'y existe pas déjà" -function base_inspathm() { - local _qdir="${1//\//\\/}" - eval "export ${2:-PATH}; "'[ "${'"${2:-PATH}"'#$1:}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'%:$1}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'//:$_qdir:/:}" == "$'"${2:-PATH}"'" -a "$'"${2:-PATH}"'" != "$1" ] && '"${2:-PATH}"'="$1${'"${2:-PATH}"':+:$'"${2:-PATH}"'}"' -} - -function: base_inspath "S'assurer que le chemin \$1 est au début de \$2(=PATH)" -function base_inspath() { - base_delpath "$@" - base_inspathm "$@" -} - -function: base_push_cwd "enregistrer le répertoire courant dans la variable \$2(=cwd) et se placer dans le répertoire 
\$1" -function base_push_cwd() { - eval "${2:-cwd}"'="$(pwd)"' - cd "$1" -} -function: base_pop_cwd "se placer dans le répertoire \${!\$2}(=\$cwd) puis retourner le code d'erreur \$1(=0)" -function base_pop_cwd() { - eval 'cd "$'"${2:-cwd}"'"' - return "${1:-0}" -} - -################################################################################ -## fichiers temporaires - -function: base_mktempf "générer un fichier temporaire et retourner son nom" -function base_mktempf() { - mktemp "${1:-"$TMPDIR/tmp.XXXXXX"}" -} - -function: base_mktempd "générer un répertoire temporaire et retourner son nom" -function base_mktempd() { - mktemp -d "${1:-"$TMPDIR/tmp.XXXXXX"}" -} - -function base_ac__forgetall() { NULIB__AC_FILES=(); } -base_ac__forgetall -function base_ac__trap() { - local file - for file in "${NULIB__AC_FILES[@]}"; do - [ -e "$file" ] && rm -rf "$file" 2>/dev/null - done - base_ac__forgetall -} -trap base_ac__trap 1 3 15 EXIT - -function: base_autoclean "\ -Ajouter les fichiers spécifiés à la liste des fichiers à supprimer à la fin du -programme" -function base_autoclean() { - local file - for file in "$@"; do - [ -n "$file" ] && NULIB__AC_FILES=("${NULIB__AC_FILES[@]}" "$file") - done -} - -function: base_ac_cleanall "\ -Supprimer *tous* les fichiers temporaires gérés par autoclean tout de suite." -function base_ac_cleanall() { - base_ac__trap -} - -function: base_ac_clean "\ -Supprimer les fichier temporaires \$1..@ si et seulement s'ils ont été générés -par base_ac_set_tmpfile() ou base_ac_set_tmpdir()" -function base_ac_clean() { - local file acfile found - local -a acfiles - for acfile in "${NULIB__AC_FILES[@]}"; do - found= - for file in "$@"; do - if [ "$file" == "$acfile" ]; then - found=1 - [ -e "$file" ] && rm -rf "$file" 2>/dev/null - break - fi - done - [ -z "$found" ] && acfiles=("${acfiles[@]}" "$acfile") - done - NULIB__AC_FILES=("${acfiles[@]}") -} - -function: base_ac_set_tmpfile "\ -Créer un fichier temporaire avec le motif \$2, l'ajouter à la liste des -fichiers à supprimer en fin de programme, et mettre sa valeur dans la -variable \$1 - -En mode debug, si (\$5 est vide ou \${!5} est une valeur vraie), et si \$3 n'est -pas vide, prendre ce fichier au lieu de générer un nouveau fichier temporaire. -Si \$4==keep, ne pas écraser le fichier \$3 s'il existe." 
-function base_ac_set_tmpfile() { - local se__d - if base_is_debug; then - if [ -n "$5" ]; then - is_yes "${!5}" && se__d=1 - else - se__d=1 - fi - fi - if [ -n "$se__d" -a -n "$3" ]; then - _setv "$1" "$3" - [ -f "$3" -a "$4" == keep ] || >"$3" - else - local se__t="$(base_mktempf "$2")" - base_autoclean "$se__t" - _setv "$1" "$se__t" - fi -} - -function: base_ac_set_tmpdir "\ -Créer un répertoire temporaire avec le motif \$2, l'ajouter à la liste des -fichiers à supprimer en fin de programme, et mettre sa valeur dans la -variable \$1 - -En mode debug, si (\$4 est vide ou \${!4} est une valeur vraie), et si \$3 n'est -pas vide, prendre ce nom de répertoire au lieu de créer un nouveau répertoire -temporaire" -function base_ac_set_tmpdir() { - local sr__d - if base_is_debug; then - if [ -n "$4" ]; then - is_yes "${!4}" && sr__d=1 - else - sr__d=1 - fi - fi - if [ -n "$sr__d" -a -n "$3" ]; then - _setv "$1" "$3" - mkdir -p "$3" - else - local sr__t="$(base_mktempd "$2")" - base_autoclean "$sr__t" - _setv "$1" "$sr__t" - fi -} diff --git a/lib/nulib/bash/base.split b/lib/nulib/bash/base.split deleted file mode 100644 index 5e900e1..0000000 --- a/lib/nulib/bash/base.split +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.split base_ "Fonctions de base: analyse et découpage de valeurs" -require: base.arr - -function: base_splitfsep "\ -Découper \$1 de la forme first[SEPsecond] entre first, qui est placé dans la -variable \$3(=first) et second, qui est placée dans la variable \$4(=second). \$2 -est la valeur SEP. Le découpage est faite sur la *première* occurence de SEP." -function base_splitfsep() { - if [[ "$1" == *"$2"* ]]; then - setv "${3:-first}" "${1%%$2*}" - setv "${4:-second}" "${1#*$2}" - else - setv "${3:-first}" "$1" - setv "${4:-second}" - fi -} - -function: base_splitfsep2 "\ -Découper \$1 de la forme [firstSEP]second entre first, qui est placé dans la -variable \$3(=first) et second, qui est placée dans la variable \$4(=second). \$2 -est la valeur SEP. Le découpage est faite sur la *première* occurence de SEP." -function base_splitfsep2() { - if [[ "$1" == *"$2"* ]]; then - setv "${3:-first}" "${1%%$2*}" - setv "${4:-second}" "${1#*$2}" - else - setv "${3:-first}" - setv "${4:-second}" "$1" - fi -} - -function: base_splitlsep "\ -Découper \$1 de la forme first[SEPsecond] entre first, qui est placé dans la -variable \$3(=first) et second, qui est placée dans la variable \$4(=second). \$2 -est la valeur SEP. Le découpage est faite sur la *dernière* occurence de SEP." -function base_splitlsep() { - if [[ "$1" == *"$2"* ]]; then - setv "${3:-first}" "${1%$2*}" - setv "${4:-second}" "${1##*$2}" - else - setv "${3:-first}" "$1" - setv "${4:-second}" - fi -} - -function: base_splitlsep2 "\ -Découper \$1 de la forme [firstSEP]second entre first, qui est placé dans la -variable \$3(=first) et second, qui est placée dans la variable \$4(=second). \$2 -est la valeur SEP. Le découpage est faite sur la *dernière* occurence de SEP." 
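# A minimal sketch contrasting the first/last-separator variants documented
# above (the sample value is illustrative):
base_splitfsep "user@host@backup" @ first second   # first=user       second=host@backup (first '@')
base_splitlsep "user@host@backup" @ first second   # first=user@host  second=backup      (last '@')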
-function base_splitlsep2() { - if [[ "$1" == *"$2"* ]]; then - setv "${3:-first}" "${1%$2*}" - setv "${4:-second}" "${1##*$2}" - else - setv "${3:-first}" - setv "${4:-second}" "$1" - fi -} - -function: base_splitvar "\ -Découper \$1 de la forme name[=value] entre le nom, qui est placé dans la -variable \$2(=name) et la valeur, qui est placée dans la variable \$3(=value)" -function base_splitvar() { - splitfsep "$1" = "${2:-name}" "${3:-value}" -} - -function: base_splitpath "\ -Découper \$1 de la forme [dir/]name entre le répertoire, qui est placé dans la -variable \$2(=dir), et le nom du fichier, qui est placé dans la variable -\$3(=name)" -function base_splitpath() { - splitlsep2 "$1" / "${2:-dir}" "${3:-name}" -} - -function: base_splitname "\ -Découper \$1 de la forme basename[.ext] entre le nom de base du fichier, qui -est placé dans la variable \$2(=basename) et l'extension, qui est placée dans -la variable \$3(=ext) - -Attention, si \$1 est un chemin, le résultat risque d'être faussé. Par exemple, -'splitname a.b/c' ne donne pas le résultat escompté." -function base_splitname() { - splitlsep "$1" . "${2:-basename}" "${3:-ext}" -} - -function: base_splithost "\ -Découper \$1 de la forme hostname[.domain] entre le nom d'hôte, qui est placé -dans la variable \$2(=hostname) et le domaine, qui est placée dans la variable -\$3(=domain)" -function base_splithost() { - splitfsep "$1" . "${2:-hostname}" "${3:-domain}" -} - -function: base_splituserhost "\ -Découper \$1 de la forme [user@]host entre le nom de l'utilisateur, qui est placé -dans la variable \$2(=user) et le nom d'hôte, qui est placée dans la variable -\$3(=host)" -function base_splituserhost() { - splitfsep2 "$1" @ "${2:-user}" "${3:-host}" -} - -function: base_splitpair "\ -Découper \$1 de la forme first[:second] entre la première valeur, qui est placé -dans la variable \$2(=src) et la deuxième valeur, qui est placée dans la variable -\$3(=dest)" -function base_splitpair() { - splitfsep "$1" : "${2:-src}" "${3:-dest}" -} - -function: base_splitproxy "\ -Découper \$1 de la forme http://[user:password@]host[:port]/ entre les valeurs -\$2(=host), \$3(=port), \$4(=user), \$5(=password) - -S'il n'est pas spécifié, port vaut 3128 par défaut" -function base_splitproxy() { - local sy__tmp sy__host sy__port sy__creds sy__user sy__password - - sy__tmp="${1#http://}" - if [[ "$sy__tmp" == *@* ]]; then - sy__creds="${sy__tmp%%@*}" - sy__tmp="${sy__tmp#${sy__creds}@}" - splitpair "$sy__creds" sy__user sy__password - fi - sy__tmp="${sy__tmp%%/*}" - splitpair "$sy__tmp" sy__host sy__port - [ -n "$sy__port" ] || sy__port=3128 - - setv "${2:-host}" "$sy__host" - setv "${3:-port}" "$sy__port" - setv "${4:-user}" "$sy__user" - setv "${5:-password}" "$sy__password" -} - -function: base_spliturl "\ -Découper \$1 de la forme scheme://[user:password@]host[:port]/path entre les -valeurs \$2(=scheme), \$3(=user), \$4(=password), \$5(=host), \$6(=port), \$7(=path) - -S'il n'est pas spécifié, port vaut 80 pour http, 443 pour https, 21 pour ftp" -function base_spliturl() { - local sl__tmp sl__scheme sl__creds sl__user sl__password sl__host sl__port sl__path - - sl__scheme="${1%%:*}" - sl__tmp="${1#${sl__scheme}://}" - if [[ "$sl__tmp" == */* ]]; then - sl__path="${sl__tmp#*/}" - sl__tmp="${sl__tmp%%/*}" - fi - if [[ "$sl__tmp" == *@* ]]; then - sl__creds="${sl__tmp%%@*}" - sl__tmp="${sl__tmp#${sl__creds}@}" - splitpair "$sl__creds" sl__user sl__password - fi - splitpair "$sl__tmp" sl__host sl__port - if [ -z "$sl__port" ]; then - [ "$sl__scheme" == 
"http" ] && sl__port=80 - [ "$sl__scheme" == "https" ] && sl__port=443 - [ "$sl__scheme" == "ftp" ] && sl__port=21 - fi - - setv "${2:-scheme}" "$sl__scheme" - setv "${3:-user}" "$sl__user" - setv "${4:-password}" "$sl__password" - setv "${5:-host}" "$sl__host" - setv "${6:-port}" "$sl__port" - setv "${7:-path}" "$sl__path" -} - -function: base_splitwcs "\ -Découper un nom de chemin \$1 entre la partie sans wildcards, qui est placée dans -la variables \$2(=basedir), et la partie avec wildcards, qui est placée dans la -variable \$3(=filespec)" -function base_splitwcs() { - local ss__p="$1" - local ss__dd="${2:-basedir}" ss__df="${3:-filespec}" ss__part ss__d ss__f - local -a ss__parts - base_array_split ss__parts "$ss__p" "/" - for ss__part in "${ss__parts[@]}"; do - if [[ "$ss__part" == *\** ]] || [[ "$ss__part" == *\?* ]] || [ -n "$ss__f" ]; then - ss__f="${ss__f:+$ss__f/}$ss__part" - else - ss__d="${ss__d:+$ss__d/}$ss__part" - fi - done - [ "${ss__p#/}" != "$ss__p" ] && ss__d="/$ss__d" - _setv "$ss__dd" "$ss__d" - _setv "$ss__df" "$ss__f" -} diff --git a/lib/nulib/bash/base.str b/lib/nulib/bash/base.str deleted file mode 100644 index 7200a3c..0000000 --- a/lib/nulib/bash/base.str +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: base.str base_ "Fonctions de base: gestion des valeurs chaines" - -function: base_strmid "Afficher la plage \$1 de la valeur \$2..* - -La plage peut être d'une des formes 'start', '[start]:length'. Si start est -négatif, le compte est effectué à partir de la fin de la chaine. Si length est -négatif, il est rajouté à la longueur de la chaine à partir de start" -function base_strmid() { - local range="$1"; shift - local str="$*" - if [[ "$range" == *:-* ]]; then - local max=${#str} - [ $max -eq 0 ] && return - local start="${range%%:*}" - [ -n "$start" ] || start=0 - while [ "$start" -lt 0 ]; do - start=$(($max$start)) - done - max=$(($max-$start)) - local length="${range#*:}" - while [ "$length" -lt 0 ]; do - length=$(($max$length)) - done - range="$start:$length" - fi - eval 'echo "${str:'" $range"'}"' -} - -function: base_strrepl "Remplacer dans la valeur \$3..* le motif \$1 par la chaine \$2 - -\$1 peut commencer par l'un des caractères /, #, % pour indiquer le type de recherche" -function base_strrepl() { - local pattern="$1"; shift - local repl="$1"; shift - local str="$*" - local cmd='echo "${str/' - if [ "${pattern#/}" != "$pattern" ]; then - pattern="${pattern#/}" - cmd="$cmd/" - elif [ "${pattern#\#}" != "$pattern" ]; then - pattern="${pattern#\#}" - cmd="$cmd#" - elif [ "${pattern#%}" != "$pattern" ]; then - pattern="${pattern#%}" - cmd="$cmd%" - fi - cmd="$cmd"'$pattern/$repl}"' - eval "$cmd" -} - -function: base_strlcomp "transformer dans le flux en entrée en UTF-8 certains caractères en leur équivalent transformable en latin1. - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." 
-function base_strlcomp() { - if [ $# -gt 0 ]; then base_strlcomp <<<"$*" - else LANG=fr_FR.UTF-8 sed $' -s/[\xE2\x80\x90\xE2\x80\x91\xE2\x80\x92\xE2\x80\x93\xE2\x80\x94\xE2\x80\x95]/-/g -s/[‘’]/\x27/g -s/[«»“”]/"/g -s/[\xC2\xA0\xE2\x80\x87\xE2\x80\xAF\xE2\x81\xA0]/ /g -s/[œ]/oe/g -s/[Œ]/OE/g -s/[æ]/ae/g -s/[Æ]/AE/g -s/a\xCC\x80/à/g -s/e\xCC\x81/é/g; s/e\xCC\x80/è/g; s/e\xCC\x82/ê/g; s/e\xCC\x88/ë/g -s/i\xCC\x88/ï/g; s/i\xCC\x82/î/g -s/o\xCC\x82/ô/g; s/o\xCC\x88/ö/g -s/u\xCC\x88/ü/g; s/u\xCC\x82/û/g -s/c\xCC\xA7/ç/g -s/A\xCC\x80/À/g -s/E\xCC\x81/É/g; s/E\xCC\x80/È/g; s/E\xCC\x82/Ê/g; s/E\xCC\x88/Ë/g -s/I\xCC\x88/Ï/g; s/I\xCC\x82/Î/g -s/O\xCC\x82/Ô/g; s/O\xCC\x88/Ö/g -s/U\xCC\x88/Ü/g; s/U\xCC\x82/Û/g -s/C\xCC\xA7/Ç/g -' - fi -} - -function: base_strnacc "supprimer les accents dans le flux en entrée en UTF-8 - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." -function base_strnacc() { - if [ $# -gt 0 ]; then base_strnacc <<<"$*" - else LANG=fr_FR.UTF-8 sed ' -s/[à]/a/g -s/[éèêë]/e/g -s/[ïî]/i/g -s/[ôö]/o/g -s/[üû]/u/g -s/[ç]/c/g -s/[À]/A/g -s/[ÉÈÊË]/E/g -s/[ÏÎ]/I/g -s/[ÔÖ]/O/g -s/[ÜÛ]/U/g -s/[Ç]/C/g -' - fi -} - -function: base_stripnl "Supprimer dans le flux en entrée les caractères de fin de ligne - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." -function base_stripnl() { - if [ $# -gt 0 ]; then base_stripnl <<<"$*" - else tr -d '\r\n' - fi -} - -function: base_nl2lf "transformer dans le flux en entrée les fins de ligne en LF - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." -function base_nl2lf() { - if [ $# -gt 0 ]; then base_nl2lf <<<"$*" - else lawk 'BEGIN {RS="\r|\r\n|\n"} {print}' - fi -} - -function: base_nl2crlf "transformer dans le flux en entrée les fins de ligne en CRLF - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." -function base_nl2crlf() { - if [ $# -gt 0 ]; then base_nl2crlf <<<"$*" - else lawk 'BEGIN {RS="\r|\r\n|\n"} {print $0 "\r"}' - fi -} - -function: base_nl2cr "transformer dans le flux en entrée les fins de ligne en CR - -si cette fonction est appelée avec des arguments, prendre \$* comme valeur du flux en entrée." 
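# A minimal sketch of the line-ending helpers above; each one reads stdin, or
# treats "$*" as its input when called with arguments (file names are illustrative):
base_nl2crlf <notes-unix.txt >notes-dos.txt    # rewrite LF line endings as CRLF
base_stripnl "line 1"$'\n'"line 2"             # prints: line 1line 2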
-function base_nl2cr() { - if [ $# -gt 0 ]; then base_nl2cr <<<"$*" - else lawk 'BEGIN {RS="\r|\r\n|\n"; ORS=""} {print $0 "\r"}' - fi -} diff --git a/lib/nulib/bash/git b/lib/nulib/bash/git deleted file mode 100644 index 99c213b..0000000 --- a/lib/nulib/bash/git +++ /dev/null @@ -1,704 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -##@require nulib.sh -##@require base -module: git "" "Fonctions pour faciliter l'utilisation de git" -require: nulib.sh base - -function: git_geturl "" -function git_geturl() { - git config --get remote.origin.url -} - -function: git_have_annex "" -function git_have_annex() { - [ -n "$(git config --get annex.uuid)" ] -} - -NULIB_GIT_FUNCTIONS=( - git_check_gitvcs git_ensure_gitvcs - git_list_branches git_list_rbranches - git_have_branch git_have_rbranch - git_get_branch git_is_branch - git_have_remote git_track_branch - git_check_cleancheckout git_ensure_cleancheckout - git_is_ancestor git_should_ff git_should_push - git_is_merged -) -NULIB_GIT_FUNCTIONS_MAP=( - cg:git_check_gitvcs eg:git_ensure_gitvcs - lbs:git_list_branches rbs:git_list_rbranches - hlb:git_have_branch hrb:git_have_rbranch - gb:git_get_branch ib:git_is_branch - hr:git_have_remote tb:git_track_branch - cc:git_check_cleancheckout ec:git_ensure_cleancheckout - ia:git_is_ancestor sff:git_should_ff spu:git_should_push - im:git_is_merged -) - -function: git_check_gitvcs "" -function git_check_gitvcs() { - git rev-parse --show-toplevel >&/dev/null -} - -function: git_ensure_gitvcs "" -function git_ensure_gitvcs() { - git_check_gitvcs || edie "Ce n'est pas un dépôt git" || return -} - -function: git_list_branches "" -function git_list_branches() { - git for-each-ref refs/heads/ --format='%(refname:short)' | csort -} - -function: git_list_rbranches "" -function git_list_rbranches() { - git for-each-ref "refs/remotes/${1:-origin}/" --format='%(refname:short)' | csort -} - -function: git_list_pbranches "lister les branches locales et celles qui existent dans l'origine \$1(=origin) et qui pourraient devenir une branche locale avec la commande git checkout -b" -function git_list_pbranches() { - local prefix="${1:-origin}/" - { - git for-each-ref refs/heads/ --format='%(refname:short)' - git for-each-ref "refs/remotes/$prefix" --format='%(refname:short)' | grep -F "$prefix" | cut -c $((${#prefix} + 1))- - } | grep -vF HEAD | csort -u -} - -function: git_have_branch "" -function git_have_branch() { - git_list_branches | grep -qF "$1" -} - -function: git_have_rbranch "" -function git_have_rbranch() { - git_list_rbranches "${2:-origin}" | grep -qF "$1" -} - -function: git_get_branch "" -function git_get_branch() { - git rev-parse --abbrev-ref HEAD 2>/dev/null -} - -function: git_get_branch_remote "" -function git_get_branch_remote() { - local branch="$1" - [ -n "$branch" ] || branch="$(git_get_branch)" - [ -n "$branch" ] || return - git config --get "branch.$branch.remote" -} - -function: git_get_branch_merge "" -function git_get_branch_merge() { - local branch="$1" - [ -n "$branch" ] || branch="$(git_get_branch)" - [ -n "$branch" ] || return - git config --get "branch.$branch.merge" -} - -function: git_get_branch_rbranch "" -function git_get_branch_rbranch() { - local branch="$1" remote="$2" merge - [ -n "$branch" ] || branch="$(git_get_branch)" - [ -n "$branch" ] || return - [ -n "$remote" ] || remote="$(git_get_branch_remote "$branch")" - [ -n "$remote" ] || return - merge="$(git_get_branch_merge "$branch")" - [ -n "$merge" ] || return - echo 
"refs/remotes/$remote/${merge#refs/heads/}" -} - -function: git_is_branch "" -function git_is_branch() { - [ "$(git_get_branch)" == "${1:-master}" ] -} - -function: git_have_remote "" -function git_have_remote() { - [ -n "$(git config --get remote.${1:-origin}.url)" ] -} - -function: git_track_branch "" -function git_track_branch() { - local branch="$1" origin="${2:-origin}" - [ -n "$branch" ] || return - git_have_remote "$origin" || return - [ "$(git config --get branch.$branch.remote)" == "$origin" ] && return - if git_have_rbranch "$branch" "$origin"; then - if git_have_branch "$branch"; then - git branch -u "$origin/$branch" "$branch" - else - git branch -t "$branch" "$origin/$branch" - fi - elif git_have_branch "$branch"; then - git push -u "$origin" "$branch" || return - fi -} - -function: git_ensure_branch " -@return 0 si la branche a été créée, 1 si elle existait déjà, 2 en cas d'erreur" -function git_ensure_branch() { - local branch="$1" source="${2:-master}" origin="${3:-origin}" - [ -n "$branch" ] || return 2 - git_have_branch "$branch" && return 1 - if git_have_rbranch "$branch" "$origin"; then - # une branche du même nom existe dans l'origine. faire une copie de cette branche - git branch -t "$branch" "$origin/$branch" || return 2 - else - # créer une nouvelle branche du nom spécifié - git_have_branch "$source" || return 2 - git branch "$branch" "$source" || return 2 - if [ -z "$NULIB_GIT_OFFLINE" ]; then - git_have_remote "$origin" && git_track_branch "$branch" "$origin" - fi - fi - return 0 -} - -function: git_check_cleancheckout "vérifier qu'il n'y a pas de modification locales dans le dépôt correspondant au répertoire courant." -function git_check_cleancheckout() { - [ -z "$(git status --porcelain 2>/dev/null)" ] -} - -function: git_ensure_cleancheckout "" -function git_ensure_cleancheckout() { - git_check_cleancheckout || - edie "Vous avez des modifications locales. Enregistrez ces modifications avant de continuer" || return -} - -function git__init_ff() { - o="${3:-origin}" - b="$1" s="${2:-refs/remotes/$o/$1}" - b="$(git rev-parse --verify --quiet "$b")" || return 1 - s="$(git rev-parse --verify --quiet "$s")" || return 1 - return 0 -} -function git__can_ff() { - [ "$1" == "$(git merge-base "$1" "$2")" ] -} - -function: git_is_ancestor "vérifier que la branche \$1 est un ancêtre direct de la branche \$2, qui vaut par défaut refs/remotes/\${3:-origin}/\$1 -note: cette fonction retourne vrai si \$1 et \$2 identifient le même commit" -function git_is_ancestor() { - local o b s; git__init_ff "$@" || return - git__can_ff "$b" "$s" -} - -function: git_should_ff "vérifier si la branche \$1 devrait être fast-forwardée à partir de la branche d'origine \$2, qui vaut par défaut refs/remotes/\${3:-origin}/\$1 -note: cette fonction est similaire à git_is_ancestor(), mais retourne false si \$1 et \$2 identifient le même commit" -function git_should_ff() { - local o b s; git__init_ff "$@" || return - [ "$b" != "$s" ] || return 1 - git__can_ff "$b" "$s" -} - -function: git_should_push "vérifier si la branche \$1 devrait être poussée vers la branche de même nom dans l'origine \$2(=origin), parce que l'origin peut-être fast-forwardée à partir de cette branche." -function git_should_push() { - git_should_ff "refs/remotes/${2:-origin}/$1" "$1" -} - -function: git_fast_forward "vérifier que la branche courante est bien \$1, puis tester s'il faut la fast-forwarder à partir de la branche d'origine \$2, puis le faire si c'est nécessaire. 
la branche d'origine \$2 vaut par défaut refs/remotes/origin/\$1" -function git_fast_forward() { - local o b s; git__init_ff "$@" || return - [ "$b" != "$s" ] || return 1 - local head="$(git rev-parse HEAD)" - [ "$head" == "$b" ] || return 1 - git__can_ff "$b" "$s" || return 1 - git merge --ff-only "$s" -} - -function: git_is_merged "vérifier que les branches \$1 et \$2 ont un ancêtre commun, et que la branche \$1 a été complètement fusionnée dans la branche destination \$2" -function git_is_merged() { - local b="$1" d="$2" - b="$(git rev-parse --verify --quiet "$b")" || return 1 - d="$(git rev-parse --verify --quiet "$d")" || return 1 - [ -n "$(git merge-base "$b" "$d")" ] || return 1 - [ -z "$(git rev-list "$d..$b")" ] -} - -################################################################################ -# git annex - -NULIB_GIT_SSH_WRAPPER= -function: git_annex_use_ssh_wrapper "" -function git_annex_use_ssh_wrapper() { - [ -n "$NULIB_GIT_SSH_WRAPPER" ] && return - NULIB_GIT_FORCE_PATH="$PATH" - NULIB_GIT_FORCE_SSH="${GIT_SSH:-ssh}" - export NULIB_GIT_FORCE_PATH NULIB_GIT_FORCE_SSH - base_delpath "$NULIBDIR/ssh-wrapper" NULIB_GIT_FORCE_PATH - base_inspath "$NULIBDIR/ssh-wrapper" PATH - NULIB_GIT_SSH_WRAPPER=1 -} - -function: git_annex_initial "sur le dépôt \$1 fraichement cloné, vérifier s'il faut faire git annex init. Si oui, l'initialiser avec le nom d'hôte, et récupérer tous les fichiers annexés -@return 1 si une erreur s'est produite" -function git_annex_initial() { - local repodir="${1:-.}" - [ -d "$repodir" ] || return 1 - repodir="$(abspath "$repodir")" - - local GIT_DIR GIT_WORK_TREE - [ "$(cd "$repodir"; git rev-parse --is-bare-repository)" == false ] || return 0 - [ -n "$(GIT_DIR="$repodir/.git" git config --get annex.uuid)" ] && return 0 - - # ici, on sait que git annex n'a pas encore été configuré - # vérifier s'il existe des fichiers annexés - local -a links - base_array_splitl links "$(find "$repodir" -type l)" - local link hasannex= - for link in "${links[@]}"; do - link="$(readlink "$link")" - if [ "${link#.git/annex/}" != "$link" ]; then - hasannex=1 - break - elif [[ "$link" == */.git/annex/* ]]; then - hasannex=1 - break - fi - done - - if [ -n "$hasannex" ]; then - base_in_path git-annex || edie "Vous devez installer git-annex" || return - local cwd; base_push_cwd "$repodir" && - git annex init "$MYHOSTNAME" && - git annex get && - git annex sync && - base_pop_cwd || base_pop_cwd 1 || return - fi -} - -################################################################################ -# Outils de haut niveau - -function: git_commit "" -function git_commit() { - local all=auto allnew push=auto nopush args - setyesval nopush "$NULIB_GIT_OFFLINE" - [ -n "$nopush" ] && push= - parse_opts + "${PRETTYOPTS[@]}" \ - -a,--all all=1 \ - -A,--all-new allnew=1 \ - -c,--cached all= \ - -p,--push push=1 \ - -l,--local push= \ - @ args -- "$@" && set -- "${args[@]}" || { - eerror "$args" - return 1 - } - - if [ -n "$allnew" ]; then - git add -A - all= - fi - - local message="$1"; shift - local -a cmd - cmd=(git commit) - [ -n "$message" ] && cmd=("${cmd[@]}" -m "$message") - if [ "$all" == "auto" ]; then - # Si des fichiers sont spécifiés, prendre ceux-là. 
- if [ -z "$*" ]; then - # Sinon, s'il y a des fichiers dans l'index, commiter uniquement ces - # fichiers - # Sinon, committer tous les fichiers modifiés - # le code suivant retourne vrai si l'index contient au moins fichier - git status --porcelain 2>/dev/null | lawk ' - BEGIN { ec = 1 } - substr($0, 1, 1) ~ /[^ ?]/ { ec = 0; exit } - END { exit ec }' || - cmd=("${cmd[@]}" -a) - fi - else - [ -n "$all" ] && cmd=("${cmd[@]}" -a) - fi - - if ! "${cmd[@]}" "$@"; then - [ "$push" == auto ] && return 1 - fi - if [ "$push" == auto ]; then - git_push --auto || return - elif [ -n "$push" ]; then - git_push --force || return - fi - return 0 -} - -function: git_update "" -function git_update() { - local args autoff=1 - parse_opts + "${PRETTYOPTS[@]}" \ - -n,--no-autoff autoff= \ - @ args -- "$@" && set -- "${args[@]}" || { - eerror "$args" - return 1 - } - - if [ -z "$autoff" ]; then - git pull "$@" - return $? - fi - - local branch orig_branch restore_branch remote rbranch pbranch - local -a branches prbranches crbranches dbranches - - base_array_splitl prbranches "$(git_list_rbranches)" - git fetch -p "$@" || return - base_array_splitl crbranches "$(git_list_rbranches)" - - # vérifier s'il n'y a pas des branches distantes qui ont été supprimées - for branch in "${prbranches[@]}"; do - if ! base_array_contains crbranches "$branch"; then - base_array_add dbranches "${branch#*/}" - fi - done - if [ ${#dbranches[*]} -gt 0 ]; then - eimportant "One or more distant branches where deleted" - for branch in "${dbranches[@]}"; do - if git_have_branch "$branch"; then - if ! ask_yesno "Do you want to delete local branch $branch?" X; then - base_array_del dbranches "$branch" - fi - fi - done - fi - if [ ${#dbranches[*]} -gt 0 ]; then - base_array_splitl branches "$(git_list_branches)" - branch="$(git_get_branch)" - if base_array_contains dbranches "$branch"; then - # si la branche courante est l'une des branches à supprimer, il faut - # basculer vers develop ou master - local swto - if [ -z "$swto" ] && base_array_contains branches develop && ! base_array_contains dbranches develop; then - swto=develop - fi - if [ -z "$swto" ] && base_array_contains branches master && ! base_array_contains dbranches master; then - swto=master - fi - if ! git_check_cleancheckout; then - echo "* There are uncommitted local changes. However current branch is slated for removal. -Make your verifications then delete the local branches: - ${swto:+$(qvals git checkout "$swto") - }$(qvals git branch -D "${dbranches[@]}")" - return 1 - fi - if [ -n "$swto" ]; then - git checkout -q "$swto" - else - echo "* Current branch is slated for removal but I don't know to which branch I should switch first. -Make your choice then delete the local branches: - $(qvals git branch -D "${dbranches[@]}")" - return 1 - fi - fi - for branch in "${dbranches[@]}"; do - git branch -D "$branch" - done - fi - - # intégrer les modifications dans les branches locales - if ! 
git_check_cleancheckout; then - branch="$(git_get_branch)" - remote="$(git_get_branch_remote "$branch")" - rbranch="$(git_get_branch_rbranch "$branch" "$remote")" - pbranch="${rbranch#refs/remotes/}" - if git merge -q --ff-only "$rbranch"; then - echo "* There are uncommitted local changes: only CURRENT branch were updated" - fi - return 0 - fi - - orig_branch="$(git_get_branch)" - base_array_splitl branches "$(git_list_branches)" - for branch in "${branches[@]}"; do - remote="$(git_get_branch_remote "$branch")" - rbranch="$(git_get_branch_rbranch "$branch" "$remote")" - pbranch="${rbranch#refs/remotes/}" - [ -n "$remote" -a -n "$rbranch" ] || continue - if git_is_ancestor "$branch" "$rbranch"; then - if git_should_ff "$branch" "$rbranch"; then - echo "* Fast-forwarding $branch -> $pbranch" - git checkout -q "$branch" - git merge -q --ff-only "$rbranch" - restore_branch=1 - fi - else - if [ "$branch" == "$orig_branch" ]; then - echo "* Cannot fast-forward CURRENT branch $branch from $pbranch -Try to merge manually with: git merge $pbranch" - else - echo "* Cannot fast-forward local branch $branch from $pbranch -You can merge manually with: git checkout $branch; git merge $pbranch" - fi - fi - done - [ -n "$restore_branch" ] && git checkout -q "$orig_branch" - return 0 -} - -function: git_push "" -function git_push() { - local all all_branches all_tags auto force args no_annex - parse_opts + "${PRETTYOPTS[@]}" \ - -a,--all all=1 \ - -b,--branches,--all-branches all_branches=1 \ - -t,--tags,--all-tags all_tags=1 \ - --auto auto=1 \ - -f,--force force=1 \ - -n,--no-annex no_annex=1 \ - @ args -- "$@" && set -- "${args[@]}" || { - eerror "$args" - return 1 - } - - if [ -n "$all" ]; then - # On a demandé à pusher toutes les branches et tous les tags - local r - git push --all "$@"; r=$? - if [ $r -eq 0 ]; then - git push --tags "$@"; r=$? - fi - return $r - elif [ -n "$all_branches" ]; then - # On a demandé à pusher toutes les branches - git push --all "$@" - return $? - elif [ -n "$all_tags" ]; then - # On a demandé à pusher tous les tags - git push --tags "$@" - return $? - elif [ $# -gt 0 ]; then - # Sinon, si des arguments sont spécifiés, les passer à git sans - # modification - git push "$@" - return $? - elif git_have_annex; then - # Si une annexe existe dans le dépôt, demander à git-annex de faire la - # synchronisation, sauf si --no-annex est spécifié ou si on est en mode - # automatique - if [ -z "$no_annex" -a -z "$auto" ]; then - git annex sync - return $? - fi - fi - - # sinon on push vers origin. 
vérifier la présence du remote - [ -n "$(git config --get remote.origin.url)" ] || { - if [ -n "$auto" ]; then - # en mode automatique, ignorer l'absence de remote - return 0 - else - eerror "Aucun remote origin n'est défini" - return 1 - fi - } - - # puis calculer la branche à pusher - local branch="$(git rev-parse --abbrev-ref HEAD 2>/dev/null)" - local origin="$(git config --get "branch.$branch.remote")" - if [ -n "$branch" -a "$origin" == origin ]; then - if [ -n "$auto" ]; then - # en mode automatique, ne pousser que la branche courante - git push "$origin" "$branch" || return - else - # utiliser la configuration par défaut, qui est sous debian squeeze - # de pousser toutes les branches - git push || return - fi - elif [ -n "$force" ]; then - # utiliser la configuration par défaut, qui est sous debian squeeze de - # pousser toutes les branches - git push || return - fi - return 0 -} - -function git__pclone() { - estep "$1 --> $(ppath "$2")" - mkdirof "$2" || return 1 - git clone "$1" "$2" || return 1 - if [ -z "$3" ]; then - ( - cd "$2" - if git_have_rbranch develop; then - git checkout develop || exit 1 - fi - ) || return 1 - fi - git_annex_initial "$2" || return 1 -} -function git__gitolite_info() { - local mode="$1" urlbase="$2" pattern="$3" - case "$mode" in - http) curl -fs "$urlbase/info${pattern:+"?$pattern"}";; - ssh) ssh -q "$urlbase" info ${pattern:+"$pattern"} 2>/dev/null;; - esac -} -function git__filter_repos() { - lawk -v prefix="$1" ' -NR <= 2 { next } -{ - # filtrer les projets qui ne sont pas encore créés - if (substr($0, 5, 2) == " C") next - repo = substr($0, 6) - # filtrer les projets de type wildcard - if (repo ~ /[\[\]\*]/) next - # enlever le prefixe - if (prefix != "" && substr(repo, 1, length(prefix)) != prefix) next - print repo -}' -} - -function: git_clone "" -function git_clone() { - no_clone= - update= - nodevelop= - recursive= - parse_opts "${PRETTYOPTS[@]}" \ - -n,--no-clone no_clone=1 \ - -u,--update update=1 \ - -m,--master nodevelop=1 \ - -r,--recursive recursive=1 \ - @ args -- "$@" && set -- "${args[@]}" || edie "$args" || return - - if [ -n "$recursive" ]; then - repobase="$1" - [ -n "$repobase" ] || edie "Vous devez spécifier l'url de base des dépôts à cloner" || return - if [ "${repobase#http://}" != "$repobase" -o "${repobase#https://}" != "$repobase" ]; then - # accès par http - mode=http - splitfsep "$repobase" :// scheme hostuserpath - splitfsep "$hostuserpath" / host userpath - splitfsep "$userpath" / user basepath - [ -n "$host" -a -n "$user" ] || edie "Vous devez spécifier l'hôte e.g http://host/git/basepath" || return - urlbase="$scheme://$host/$user" - else - # accès par ssh - mode=ssh - splitfsep "$repobase" : userhost basepath - splituserhost "$userhost" user host - [ -n "$user" ] || user=git - [ -n "$host" ] || edie "Vous devez spécifier l'hôte" || return - urlbase="$user@$host" - fi - basepath="${basepath%/}" - destbase="${2:-.}" - - git_annex_use_ssh_wrapper - prefix="${basepath:+$basepath/}" - base_array_splitl repos "$(set -o pipefail; git__gitolite_info "$mode" "$urlbase" "$prefix" | git__filter_repos "$prefix")" || edie || return - for repo in "${repos[@]}"; do - case "$mode" in - http) repourl="$urlbase/$repo";; - ssh) repourl="$urlbase:$repo";; - esac - setx destdir=abspath "$destbase/${repo#$prefix}" - if [ -d "$destdir" ]; then - if [ -n "$update" ]; then - ( - ${no_clone:+qvals} cd "$destdir" - ${no_clone:+qvals} git pull - ) || edie || return - else - estepe "$(ppath2 "$destdir"): répertoire existant" - fi - elif [ -n 
"$no_clone" ]; then - qvals git clone "$repourl" "$destdir" - else - git__pclone "$repourl" "$destdir" "$nodevelop" || edie || return - fi - done - - else - repourl="${1%.git}" - [ -n "$repourl" ] || edie "Vous devez spécifier l'url du dépôt git" || return - - destdir="$2" - if [ -z "$destdir" ]; then - splitfsep "$repourl" : userhost path - setx destdir=basename -- "$path" - destdir="${destdir%.git}" - fi - setx destdir=abspath "$destdir" - - git_annex_use_ssh_wrapper - if [ -d "$destdir" ]; then - if [ -n "$update" ]; then - ( - ${no_clone:+qvals} cd "$destdir" - ${no_clone:+qvals} git pull - ) || edie || return - else - estepe "$(ppath2 "$destdir"): répertoire existant" - fi - elif [ -n "$no_clone" ]; then - qvals git clone "$repourl" "$destdir" - else - git__pclone "$repourl" "$destdir" "$nodevelop" || edie || return - fi - fi -} - -function: git_crone "" -function git_crone() { - repourl="${1%.git}" - [ -n "$repourl" ] || edie "Vous devez spécifier l'url du dépôt git" || return - if [ "${repourl#http://}" != "$repourl" -o "${repourl#https://}" != "$repourl" ]; then - # accès par http - mode=http - splitfsep "$repourl" :// scheme hostuserpath - splitfsep "$hostuserpath" / host userpath - splitfsep "$userpath" / user path - [ -n "$host" -a -n "$user" ] || edie "Vous devez spécifier l'hôte e.g http://host/git/repo" || return - hostuser="$scheme://$host/$user" - else - # accès par ssh - mode=ssh - splitfsep "$repourl" : userhost path - splituserhost "$userhost" user host - [ -n "$user" ] || user=git - [ -n "$host" ] || edie "Vous devez spécifier l'hôte" || return - userhost="$user@$host" - fi - [ -n "$path" ] || edie "Vous devez spécifier le chemin du dépôt git" || return - - destdir="$2" - if [ -z "$destdir" ]; then - setx destdir=basename -- "$path" - destdir="${destdir%.git}" - fi - tmpdestdir= - if [ -d "$destdir" ]; then - [ -d "$destdir/.git" ] && edie "$(ppath2 "$destdir"): un dépôt existe déjà" || return - ac_set_tmpdir tmpdestdir - fi - - if [ "$mode" == http ]; then - setx result=curl -fs "$hostuser/create?$path" || edie || return - echo "$result" - [[ "$result" == FATAL:* ]] && edie || return - if [ -n "$tmpdestdir" ]; then - setxx destname=abspath "$destdir" // basename - git clone "$hostuser/$path" "$tmpdestdir/$destname" || edie || return - mv "$tmpdestdir/$destname/.git" "$destdir" || edie || return - ac_clean "$tmpdestdir" - else - git clone "$hostuser/$path" "$destdir" || edie || return - fi - elif [ "$mode" == ssh ]; then - git_annex_use_ssh_wrapper - ssh "$userhost" create "$path" || edie || return - if [ -n "$tmpdestdir" ]; then - setxx destname=abspath "$destdir" // basename - git clone "$userhost:$path" "$tmpdestdir/$destname" || edie || return - mv "$tmpdestdir/$destname/.git" "$destdir" || edie || return - ac_clean "$tmpdestdir" - else - git clone "$userhost:$path" "$destdir" || edie || return - fi - else - edie "$mode: mode non supporté" || return - fi - git_annex_initial "$destdir" || edie || return -} diff --git a/lib/nulib/bash/nulib.sh b/lib/nulib/bash/nulib.sh deleted file mode 120000 index b22bb26..0000000 --- a/lib/nulib/bash/nulib.sh +++ /dev/null @@ -1 +0,0 @@ -../load.sh \ No newline at end of file diff --git a/lib/nulib/bash/pretty b/lib/nulib/bash/pretty deleted file mode 100644 index a46848f..0000000 --- a/lib/nulib/bash/pretty +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: pretty base_ "Affichage en couleur" -require: base diff --git a/lib/nulib/bash/sysinfos 
b/lib/nulib/bash/sysinfos deleted file mode 100644 index 2fa077a..0000000 --- a/lib/nulib/bash/sysinfos +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -##@cooked nocomments -module: sysinfos base_ "Informations sur le système courant" -require: base diff --git a/lib/nulib/bshell b/lib/nulib/bshell deleted file mode 100755 index 9d4e2e2..0000000 --- a/lib/nulib/bshell +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -source "$(dirname -- "$0")/load.sh" || exit 1 -export NULIBDIR NULIBINIT - -ac_set_tmpfile bashrc -echo >"$bashrc" "\ -if ! grep -q '/etc/bash.bashrc' /etc/profile; then - [ -f /etc/bash.bashrc ] && source /etc/bash.bashrc -fi -if ! grep -q '~/.bashrc' ~/.bash_profile; then - [ -f ~/.bashrc ] && source ~/.bashrc -fi -[ -f /etc/profile ] && source /etc/profile -[ -f ~/.bash_profile ] && source ~/.bash_profile - -# Modifier le PATH. Ajouter le chemin vers les scripts de support -#PATH=$(qval "$MYDIR:$PATH") - -if [ -n '$DEFAULT_PS1' ]; then - DEFAULT_PS1=$(qval "[nulib-shell] $DEFAULT_PS1") -else - if [ -z '$PS1' ]; then - PS1='\\u@\\h \\w \\$ ' - fi - PS1=\"[nulib-shell] \$PS1\" -fi -$(qvals source "$MYDIR/load.sh")" - -"$SHELL" --rcfile "$bashrc" -i -- "$@" -# note: ne pas faire exec "$SHELL", parce que sinon le fichier temporaire bashrc -# n'est pas supprimé - -ac_clean "$bashrc" -exit 0 diff --git a/lib/nulib/ddb-query_rtoinst b/lib/nulib/ddb-query_rtoinst deleted file mode 100755 index 2604d45..0000000 --- a/lib/nulib/ddb-query_rtoinst +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -"$(dirname -- "$0")/deploydb" -m toinst --run -r toinst.query_rtoinst "$@" diff --git a/lib/nulib/ddb-query_rwoinst b/lib/nulib/ddb-query_rwoinst deleted file mode 100755 index 9eb1138..0000000 --- a/lib/nulib/ddb-query_rwoinst +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -"$(dirname -- "$0")/deploydb" -m woinst --run -r woinst.query_rwoinst "$@" diff --git a/lib/nulib/ddb-query_rwyinst b/lib/nulib/ddb-query_rwyinst deleted file mode 100755 index e5bf429..0000000 --- a/lib/nulib/ddb-query_rwyinst +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -"$(dirname -- "$0")/deploydb" -m wyinst --run -r wyinst.query_rwyinst "$@" diff --git a/lib/nulib/ddb-query_xuinst b/lib/nulib/ddb-query_xuinst deleted file mode 100755 index 58037e9..0000000 --- a/lib/nulib/ddb-query_xuinst +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -"$(dirname -- "$0")/deploydb" -m uinst --run -r uinst.query_xuinst "$@" diff --git a/lib/nulib/ddb-save_objects b/lib/nulib/ddb-save_objects deleted file mode 100755 index 24778b7..0000000 --- a/lib/nulib/ddb-save_objects +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -"$(dirname -- "$0")/deploydb" --run -r base.save_objects "$@" diff --git a/lib/nulib/deploydb b/lib/nulib/deploydb deleted file mode 100755 index 9b9976b..0000000 --- a/lib/nulib/deploydb +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -MYNAME="$(basename -- "$0")" -: "${PYTHON_MAIN_MODULE:=$MYNAME}" - -MYDIR="$(dirname -- "$0")" -if [ -n "$PYTHONPATH" ]; then 
PYTHONPATH="$MYDIR/python:$PYTHONPATH" -else PYTHONPATH="$MYDIR/python" -fi -export PYTHONPATH - -exec python2.7 -m "$PYTHON_MAIN_MODULE" "$@" diff --git a/lib/nulib/deploydb.conf b/lib/nulib/deploydb.conf deleted file mode 100644 index ea2032e..0000000 --- a/lib/nulib/deploydb.conf +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 mode: conf -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -host localhost host=localhost. diff --git a/lib/nulib/doc/deploydb/index.md b/lib/nulib/doc/deploydb/index.md deleted file mode 100644 index bd48877..0000000 --- a/lib/nulib/doc/deploydb/index.md +++ /dev/null @@ -1,259 +0,0 @@ -`deploydb` est un moyen de décrire des informations de déploiement ou de -configuration à propos de certains objets (hôtes, modules, webapps, woapps, -etc.) - -# Syntaxe - -Le format du fichier de configuration est volontairement simple. Toute la -logique est implémentée dans les clients qui accèdent à l'information stockée - -Le fichier contient deux sortes d'informations: -* définition d'objet -* définition de faits - -Les lignes vides ou commençant par '#' sont ignorées. -Si une ligne commence par un espace, elle est fusionnée avec la ligne -précédente. - -## Définition d'objet - -Une définition d'objet a le format suivant: -~~~ -otype oid[=values] [ATTRS...] [LINKS...] -~~~ - -`otype` -: type d'objet à créer ou à mettre à jour - -`oid` -: identifiant de l'objet à créer ou à mettre à jour - -`values` -: valeurs de l'objet, séparées par des virgules - -`ATTR` -: attribut de l'objet - -`LINK` -: définition d'objet lié - -Une définition d'attribut a l'un des formats suivants: -~~~ -name[=value] -name+[=value] -name-[=value] -name%[=value] -~~~ - -Les attributs sont multivalués, et par défaut, on rajoute la nouvelle valeur aux -valeurs existantes sauf si elle existe déjà dans l'attribut. - -value vaut par défaut 'true', ce qui n'est pas la même chose qu'une valeur -vide. comparer les deux définitions suivantes: -~~~ -first # first vaut 'true' -second= # second vaut '' -~~~ - -Les types de mise à jour valides sont: -* `=` ajout de la valeur si l'attribut ne la contient pas déjà -* `+=` ajout inconditionnel d'une valeur à l'attribut -* `-=` suppression d'une valeur de l'attribut -* `%=` remettre à zéro l'attribut d'abord - -Ainsi, les définitions suivantes sont équivalentes deux à deux: -~~~ -attr=value attr=value # le doublon est supprimé -attr=value # c'est comme si on ne spécifie la valeur qu'une seule fois - -attr=value attr%=first attr=second -attr=value attr-=value attr=first attr=second -~~~ - -Une définition de lien a le format suivant: -~~~ --otype oids... [ATTRS...] -~~~ - -`otype` -: type de l'objet lié - -`oids` -: liste d'identifiants d'objets liés séparés par des virgules - -`ATTR` -: attribut à rajouter à la définition du lien - -Voici un exemple complet: -~~~ -humain bob nom=Payet prenom=Robert - age=42 - desc="un humain qui a un père, une mère et deux voitures" - -humain eric type=pere - -humain martine type=mere - -vehicule titine,bumblebee -humain eric nom=Payet prenom=Eric -humain martine nom=Payet prenom="Martine Joséphine" -vehicule titine marque=Citroen immatriculation=BX-467-PM -vehicule bumblebee marque=Camaro type=autobot -~~~ - -## Définition de faits - -Un fait est l'affirmation d'un lien d'action ou d'état entre deux objets, -décrit par un verbe. 
Par exemple, pour décrire le fait que bob mange une -tarte, on écrirait: -~~~ -humain bob -aliment tarte - --humain bob - mange -aliment tarte -~~~ - -Une définition de fait a le format suivant: -~~~ --sotype soids... [DEFATTRS...] - verb -totype toids... [FACTATTRS...] - ... -~~~ - -`sotype` -`totype` -: types d'objets source et cible - -`soid` -`toid` -: identifiants des objets source et cible - -`verb` -: identifiant du lien entre la source et la destination. en général, il s'agit - d'un verbe d'action ou d'état conjugué à la 3ème personne du singulier. - - si le verbe commence par `~` alors la définition est inversée. par exemple, - les deux faits suivants sont rigoureusement équivalents: - ~~~ - -otype src verb -otype dest - -otype dest ~verb -otype src - ~~~ - cela permet de supporter les cas où la définition inverse est plus facile. - -`DEFATTR` -: attribut pour tous les faits définis dans cette déclaration - -`FACTATTR` -: attribut spécifique au fait défini - -# deploydb - -Le script `deploydb` permet d'interroger la base de données ou de lancer une -fonction pour traiter le contenu de la base de données - -Dans ce document, `DEPLOYDBDIR` désigne le répertoire du script `deploydb` - -Options -`-c, --config CONFIG` -: spécifier un fichier de configuration à charger. la valeur par défaut est - `deploydb.conf` - - si le fichier de configuration n'est pas spécifié ou est spécifié sans chemin, - `deploydb:path` est initialisé avec la valeur par défaut suivante: - ~~~ - ~/etc/deploydb:/var/local/deploydb:DEPLOYDBDIR - ~~~ - - sinon, `deploydb:path` est initialisé avec le répertoire de CONFIG - -`-m, --module MODULE` -: spécifier un module supplémentaire à charger. le module python effectivement - cherché dans le path et chargé est `MODULE_module`. La liste par défaut des - modules à charger contient un seul élément, `base`, ce qui signifie que le - module `base_module` est chargé. Les modules permettent de définir de la - logique pour les objets, ou l'outil à lancer. - -`-r, --func FUNC` -: spécifier le nom de la fonction à lancer après le chargement des modules et - des fichiers de configuration. La valeur par défaut est `base.query`, qui - interroge la base de données et affiche son contenu. - - La fonction est appelée avec les arguments de la ligne de commande, sans les - options, qui sont traitées en amont. - -`--dump` -: afficher le contenu complet de la base de données, pour débugger. ignorer - l'option `-r` - -# Module base - -Le module `base` chargé par défaut définit -* les objets de type `deploydb`, `host` -* la fonction `query()` - -## deploydb - -En créant des instances de cet objet avec des identifiants normalisés, il est -possible de modifier la configuration. - -`deploydb path dir=CONFDIR` -: définir les répertoires de recherche pour les fichiers de configuration - spécifiés sans chemin. dans ce document, cette valeur est appelée - `deploydb:path` - -`deploydb include file=CONFIG` -: définir des fichiers de configuration supplémentaire à charger. Si les - fichiers sont spécifiés sans chemin, il sont cherchés dans `deploydb:path` - -`deploydb loadcsv file=CSVFILE` -: charger des définitions d'objets depuis des fichiers CSV. Si les fichiers sont - spécifiés sans chemin, ils sont cherchés dans `deploydb:path` - - L'attribut `otype_col` qui vaut par défaut `otype` permet de définir la - colonne qui contient le type d'objet. L'attribut `otype` permet de spécifier - le type d'objet si la colonne n'existe pas dans le fichier. 
- - L'attribut `oid_col` qui vaut par défaut `oid` permet de définir la colonne - qui contient l'identifiant d'objet à créer. - - Toutes les autres colonnes du fichier sont utilisées pour définir les - attributs des objets. - -## host - -Cet objet définit un hôte vers lequel on peut par exemple déployer un artifact. - -`host * domain=DOMAIN.TLD` -: définir un domaine par défaut pour tous les hôtes spécifiés sans domaine - -Les attributs suivants sont supportés: - -`host` -: nom d'hôte. le domaine par défaut est ajouté le cas échéant. pour ne pas - rajouter un domaine, spécifier le nom avec un point final e.g `localhost.` - -`hostname` -: nom d'hôte sans le domaine - -`domain` -: domaine sans le nom d'hôte - -`ip` -: adresse IP de l'hôte - -Si seul `host` est spécifié, `hostname` et `domain` sont calculés en fonction de -sa valeur. - -Si seul `hostname` est spécifié, `host` est calculé en fonction de sa valeur et -de celle de `domain` - -Si `ip` n'est pas spécifié, une résolution DNS est effectuée pour déterminer -l'adresse de `host` - -Si l'objet est défini sans valeurs, alors la valeur finale est la liste des hôtes. - -## base.query() - -Interroger la base de données - -XXX déterminer le format des requêtes - --*- coding: utf-8 mode: markdown -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8:noeol:binary \ No newline at end of file diff --git a/lib/nulib/doc/index.md b/lib/nulib/doc/index.md deleted file mode 100644 index a478e7e..0000000 --- a/lib/nulib/doc/index.md +++ /dev/null @@ -1,21 +0,0 @@ -# nulib - -nulib est une librairie de fonctions shell et python, ainsi qu'un -ensemble d'utilitaires basés sur ces librairies - -## Prérequis - -nulib est conçu pour tourner sur des versions récentes de Linux et -requière bash 4.1+, GNUawk 3.1+ et Python 2.6 - -Les systèmes cibles sont Debian 8+ (jessie, stretch) et Oracle Linux 6+ - -| Système | bash | GNUawk | Python | -+----------------+------+--------+--------+ -| RHEL6, OL6 | 4.1 | 3.1 | 2.6 | -| RHEL7, OL7 | 4.2 | 4.0 | 2.7 | -| Debian 8 | 4.3 | 4.1 | 2.7 | -| Debian 9 | 4.4 | 4.1 | 2.7 | - - --*- coding: utf-8 mode: markdown -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8:noeol:binary \ No newline at end of file diff --git a/lib/nulib/load.sh b/lib/nulib/load.sh deleted file mode 100644 index b6ad3c1..0000000 --- a/lib/nulib/load.sh +++ /dev/null @@ -1,176 +0,0 @@ -##@cooked comments # -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -## Charger nulib et rendre disponible les modules bash, awk et python -##@cooked nocomments -# Ce fichier doit être sourcé en premier. Si ce fichier n'est pas sourcé, alors -# le répertoire nulib doit être disponible dans le répertoire du script qui -# inclue ce fichier. -# Une fois ce fichier sourcé, les autres modules peuvent être importés avec -# require:() ou import:() e.g. 
-# source /etc/nulib.sh || exit 1 -# import: other_modules -# ou pour une copie locale de nulib: -# source "$(dirname "$0")/nulib/load.sh" || exit 1 -# import: other_modules - -# vérifier version minimum de bash -if [ "x$BASH" = "x" ]; then - echo "ERROR: nulib: this script requires bash" - exit 1 -fi - -function base_eerror() { echo "ERROR: $*" 1>&2; } -function base_die() { [ $# -gt 0 ] && base_eerror "$*"; exit 1; } -function base_edie() { [ $# -gt 0 ] && base_eerror "$*"; return 1; } -function base_delpath() { local _qdir="${1//\//\\/}"; eval "export ${2:-PATH}; ${2:-PATH}"'="${'"${2:-PATH}"'#$1:}"; '"${2:-PATH}"'="${'"${2:-PATH}"'%:$1}"; '"${2:-PATH}"'="${'"${2:-PATH}"'//:$_qdir:/:}"; [ "$'"${2:-PATH}"'" == "$1" ] && '"${2:-PATH}"'='; } -function base_addpath() { local _qdir="${1//\//\\/}"; eval "export ${2:-PATH}; "'[ "${'"${2:-PATH}"'#$1:}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'%:$1}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'//:$_qdir:/:}" == "$'"${2:-PATH}"'" -a "$'"${2:-PATH}"'" != "$1" ] && '"${2:-PATH}"'="${'"${2:-PATH}"':+$'"${2:-PATH}"':}$1"'; } -function base_inspathm() { local _qdir="${1//\//\\/}"; eval "export ${2:-PATH}; "'[ "${'"${2:-PATH}"'#$1:}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'%:$1}" == "$'"${2:-PATH}"'" -a "${'"${2:-PATH}"'//:$_qdir:/:}" == "$'"${2:-PATH}"'" -a "$'"${2:-PATH}"'" != "$1" ] && '"${2:-PATH}"'="$1${'"${2:-PATH}"':+:$'"${2:-PATH}"'}"'; } -function base_inspath() { base_delpath "$@"; base_inspathm "$@"; } - -if [ ${BASH_VERSINFO[0]} -ge 5 -o \( ${BASH_VERSINFO[0]} -eq 4 -a ${BASH_VERSINFO[1]} -ge 1 \) ]; then : -elif [ -n "$NULIB_IGNORE_BASH_VERSION" ]; then : -else base_die "nulib: bash 4.1+ is required" -fi - -# Calculer emplacement de nulib -NULIBDIR="@@dest@@/lib/nulib" -if [ "$NULIBDIR" = "@@""dest""@@/lib/nulib" ]; then - # La valeur "@@"dest"@@" n'est remplacée que dans la copie de ce script - # faite dans /etc. Sinon, il faut toujours faire le calcul. Cela permet de - # déplacer la librairie n'importe ou sur le disque, ce qui est - # particulièrement intéressant quand on fait du déploiement. - NULIBDIR="${BASH_SOURCE[0]}" - if [ -f "$NULIBDIR" -a "$(basename -- "$NULIBDIR")" == load.sh ]; then - # Fichier sourcé depuis nulib/ - NULIB_SOURCED=1 - NULIBDIR="$(dirname -- "$NULIBDIR")" - elif [ -f "$NULIBDIR" -a "$(basename -- "$NULIBDIR")" == nulib.sh ]; then - # Fichier sourcé depuis nulib/bash - NULIB_SOURCED=1 - NULIBDIR="$(dirname -- "$NULIBDIR")/.." - else - # Fichier non sourcé. Tout exprimer par rapport au script courant - NULIB_SOURCED= - NULIBDIR="$(dirname -- "$0")" - if [ -d "$NULIBDIR/nulib" ]; then - NULIBDIR="$NULIBDIR/nulib" - elif [ -d "$NULIBDIR/lib/nulib" ]; then - NULIBDIR="$NULIBDIR/lib/nulib" - fi - fi -fi -NULIBDIR="$(cd "$NULIBDIR" 2>/dev/null; pwd)" -NULIBDIRS=("$NULIBDIR/bash") - -# marqueur pour vérifier que nulib a réellement été chargé. il faut avoir $NULIBINIT == $NULIBDIR -# utilisé par le module base qui doit pouvoir être inclus indépendamment -NULIBINIT="$NULIBDIR" - -## Modules bash -NULIB_LOADED_MODULES=(nulib.sh) -NULIB_DEFAULT_MODULES=(base pretty sysinfos) - -# Si cette variable est non vide, require: recharge toujours le module, même -# s'il a déjà été chargé. 
Cette valeur n'est pas transitive: il faut toujours -# recharger explicitement tous les modules désirés -NULIB_FORCE_RELOAD= - -function nulib__define_functions() { - function nulib_check_loaded() { - local module - for module in "${NULIB_LOADED_MODULES[@]}"; do - [ "$module" == "$1" ] && return 0 - done - return 1 - } - function module:() { - NULIB_MODULE="$1" - NULIB_FUNC_PREFIX="$2" - if ! nulib_check_loaded "$1"; then - NULIB_LOADED_MODULES=("${NULIB_LOADED_MODULES[@]}" "$1") - fi - } - function function:() { - if [ -n "$NULIB_ALLOW_IMPORT" -a -n "$NULIB_FUNC_PREFIX" -a "${1#$NULIB_FUNC_PREFIX}" != "$1" ]; then - eval "function ${1#$NULIB_FUNC_PREFIX}() { $1 \"\$@\"; }" - fi - } -} - -function nulib__require:() { - local nr__module nr__nulibdir nr__found - [ $# -gt 0 ] || set DEFAULTS - - # sauvegarder valeurs globales - local nr__orig_module="$NULIB_MODULE" nr__orig_func_prefix="$NULIB_FUNC_PREFIX" - NULIB_MODULE= - NULIB_FUNC_PREFIX= - - # garder une copie de la valeur originale et casser la transitivité - local nr__force_reload="$NULIB_FORCE_RELOAD" - local NULIB_FORCE_RELOAD - - local nr__should_import="$NULIB_SHOULD_IMPORT" nr__allow_import="$NULIB_ALLOW_IMPORT" nr__recursive_import="$NULIB_RECURSIVE_IMPORT" - for nr__module in "$@"; do - local NULIB_SHOULD_IMPORT="$nr__should_import" NULIB_ALLOW_IMPORT="$nr__allow_import" NULIB_RECURSIVE_IMPORT="$nr__recursive_import" - [ -n "$NULIB_SHOULD_IMPORT" ] && NULIB_ALLOW_IMPORT=1 - nr__found= - for nr__nulibdir in "${NULIBDIRS[@]}"; do - if [ -f "$nr__nulibdir/$nr__module" ]; then - nr__found=1 - if [ -n "$nr__force_reload" ] || ! nulib_check_loaded "$nr__module"; then - NULIB_LOADED_MODULES=("${NULIB_LOADED_MODULES[@]}" "$nr__module") - source "$nr__nulibdir/$nr__module" || base_die - fi - break - fi - done - if [ -z "$nr__found" -a "$nr__module" == DEFAULTS ]; then - for nr__module in "${NULIB_DEFAULT_MODULES[@]}"; do - if [ -f "$nr__nulibdir/$nr__module" ]; then - nr__found=1 - if [ -n "$nr__force_reload" ] || ! 
nulib_check_loaded "$nr__module"; then - NULIB_LOADED_MODULES=("${NULIB_LOADED_MODULES[@]}" "$nr__module") - source "$nr__nulibdir/$nr__module" || base_die - fi - else - break - fi - done - fi - [ -n "$nr__found" ] || base_die "nulib: unable to find module $nr__module in (${NULIBDIRS[*]})" - done - - # restaurer valeurs globales - NULIB_MODULE="$nr__orig_module" - NULIB_FUNC_PREFIX="$nr__orig_func_prefix" -} - -function require:() { - [ -z "$NULIB_NO_DISABLE_SET_X" ] && [[ $- == *x* ]] && { set +x; local NULIB_REQUIRE_SET_X=1; }; if [ -n "$NULIB_REQUIRE_SET_X" ]; then [ -n "$NULIB_REQUIRE_SET_X_RL1" ] || local NULIB_REQUIRE_SET_X_RL1; local NULIB_REQUIRE_SET_X_RL2=$RANDOM; [ -n "$NULIB_REQUIRE_SET_X_RL1" ] || NULIB_REQUIRE_SET_X_RL1=$NULIB_REQUIRE_SET_X_RL2; fi # désactiver set -x de manière réentrante - local NULIB_SHOULD_IMPORT - [ -n "$NULIB_RECURSIVE_IMPORT" -a -n "$NULIB_ALLOW_IMPORT" ] && NULIB_SHOULD_IMPORT=1 - local NULIB_ALLOW_IMPORT NULIB_RECURSIVE_IMPORT NULIB_FUNC_PREFIX - nulib__define_functions - nulib__require: "$@" - [ -n "$NULIB_REQUIRE_SET_X" -a "$NULIB_REQUIRE_SET_X_RL1" == "$NULIB_REQUIRE_SET_X_RL2" ] && set -x - return 0 -} - -function import:() { - [ -z "$NULIB_NO_DISABLE_SET_X" ] && [[ $- == *x* ]] && { set +x; local NULIB_REQUIRE_SET_X=1; }; if [ -n "$NULIB_REQUIRE_SET_X" ]; then [ -n "$NULIB_REQUIRE_SET_X_RL1" ] || local NULIB_REQUIRE_SET_X_RL1; local NULIB_REQUIRE_SET_X_RL2=$RANDOM; [ -n "$NULIB_REQUIRE_SET_X_RL1" ] || NULIB_REQUIRE_SET_X_RL1=$NULIB_REQUIRE_SET_X_RL2; fi # désactiver set -x de manière réentrante - local NULIB_SHOULD_IMPORT=1 NULIB_ALLOW_IMPORT NULIB_RECURSIVE_IMPORT NULIB_FUNC_PREFIX - nulib__define_functions - nulib__require: "$@" - [ -n "$NULIB_REQUIRE_SET_X" -a "$NULIB_REQUIRE_SET_X_RL1" == "$NULIB_REQUIRE_SET_X_RL2" ] && set -x - return 0 -} - -## Autres modules -base_inspath "$NULIBDIR/awk" AWKPATH; export AWKPATH -base_inspath "$NULIBDIR/python" PYTHONPATH; export PYTHONPATH - -## Auto import DEFAULTS -nulib__define_functions -if [ -n "$NULIB_SOURCED" -a -z "$NULIB_NO_IMPORT_DEFAULTS" ]; then - import: DEFAULTS -fi diff --git a/lib/nulib/nulib_config.py b/lib/nulib/nulib_config.py deleted file mode 100644 index 47c2751..0000000 --- a/lib/nulib/nulib_config.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -# fichier d'exemple pour la configuration de nulib, à placer quelque part dans -# PYTHONPATH - -"""Configuration de nulib. - -Ce module contient des variables qui servent à configurer le comportement de -nulib. -""" - -# Liste des modules de base qui sont importés automatiquement avec -# from nulib_py import * -#MODULES = () - -# L'importation de nulib.base.encoding provoque-t-il la configuration de la -# locale courante? -#SET_LOCALE = True - -# Encoding par défaut, s'il est impossible de le détecter autrement. -#DEFAULT_INPUT_ENCODING = 'utf-8' -#DEFAULT_OUTPUT_ENCODING = 'utf-8' - -# Faut-il supprimer le répertoire courant de sys.path? 
-#CLEAN_SYSPATH = True diff --git a/lib/nulib/pshell b/lib/nulib/pshell deleted file mode 100755 index 36d7b49..0000000 --- a/lib/nulib/pshell +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -# -*- coding: utf-8 mode: sh -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -scriptdir="$(dirname -- "$0")" -if [ -z "$NULIBDIR" -o "$NULIBDIR" != "$NULIBINIT" ]; then - # charger nulib si ce n'est pas déjà le cas - source "$scriptdir/load.sh" -fi - -DEFAULT_PYTHON=python2.7 - -# -echo ">>> Shell Python pour nulib" -exec "${PYTHON:-$DEFAULT_PYTHON}" -i -c "$(<"$scriptdir/pshell.py")" diff --git a/lib/nulib/pshell.py b/lib/nulib/pshell.py deleted file mode 100644 index 3938e2b..0000000 --- a/lib/nulib/pshell.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 -# initialisation pour pshell - -import sys, os -from os import path -import pdb - -from nulib import * -from nulib.web import ui -from nulib.web import bootstrap as bs diff --git a/lib/nulib/python/.gitignore b/lib/nulib/python/.gitignore deleted file mode 100644 index b9b295a..0000000 --- a/lib/nulib/python/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/build/ -*~ -*.py[co] diff --git a/lib/nulib/python/deploydb/__init__.py b/lib/nulib/python/deploydb/__init__.py deleted file mode 100644 index 9d853e8..0000000 --- a/lib/nulib/python/deploydb/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = () - diff --git a/lib/nulib/python/deploydb/__main__.py b/lib/nulib/python/deploydb/__main__.py deleted file mode 100644 index 0f54021..0000000 --- a/lib/nulib/python/deploydb/__main__.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Interroger la base deploydb -""" - -import logging; log = logging.getLogger(__name__) -import sys -from os import path -from argparse import ArgumentParser - -from .utils import * -from .parser import Parser -from .objects import catalog - -DEFAULT_CONFIG = 'deploydb.conf' -USER_CONFDIR = path.expanduser('~/etc/deploy') -SYSTEM_CONFDIR = '/var/local/deploy' - -DEFAULT_MODULES = ['base'] -DEFAULT_FUNC = 'base.query' -DEFAULT_ACTION = 'run' - -SCRIPTDIR = path.dirname(path.dirname(path.dirname(__file__))) - -################################################################################ -# Programme principal - -logging.basicConfig() - -from argparse import ArgumentParser, HelpFormatter -if sys.argv[1:2] == ['--compat']: - # Avec l'argument --compat, désactiver la classe FancyHelpFormatter qui - # se base sur une API non documentée - sys.argv = sys.argv[0:1] + sys.argv[2:] - FancyHelpFormatter = HelpFormatter -else: - class FancyHelpFormatter(HelpFormatter): - """Comme HelpFormatter, mais ne touche pas aux lignes qui commencent par les - caractères '>>>'. Cela permet de mixer du texte formaté et du texte non - formaté. 
- """ - def _fill_text(self, text, width, indent): - return ''.join([indent + line for line in text.splitlines(True)]) - def _split_lines(self, text, width): - lines = [''] - for line in text.splitlines(): - if line.startswith('>>>'): - lines.append(line) - lines.append('') - else: - lines[-1] += '\n' + line - lines = filter(None, lines) - texts = [] - for line in lines: - if line.startswith('>>>'): - line = line[3:] - if line: texts.append(line) - else: - texts.extend(super(FancyHelpFormatter, self)._split_lines(line, width)) - return texts -AP = ArgumentParser( - usage=u"deploydb args...", - description=__doc__, - formatter_class=FancyHelpFormatter, -) -AP.set_defaults( - missing_ok=False, - modules=DEFAULT_MODULES, - func=DEFAULT_FUNC, - resolve=True, - action=DEFAULT_ACTION, -) -AP.add_argument('-c', '--config', dest='config', - help=u"Spécifier le fichier de configuration à utiliser. S'il s'agit d'un nom sans chemin, il est recherché dans les répertoires par défaut.") -AP.add_argument('--missing-ok', action='store_true', dest='missing_ok', - help=u"Sortir sans erreur si le fichier de configuration n'est pas trouvé") -AP.add_argument('-m', '--module', action='append', dest='modules', metavar='MODULE', - help=u"Spécifier un module à charger. Cette option peut être spécifiée autant de fois que nécessaire. Par défaut, seul le module 'base' est chargé.") -AP.add_argument('-r', '--func', dest='func', - help=u"Spécifier la fonction à lancer après le chargement de la base de données. La valeur par défaut est %s" % DEFAULT_FUNC) -AP.add_argument('--no-resolve', action='store_false', dest='resolve', - help=u"Ne pas résoudre les objets avant de lancer la fonction. Cette option avancée ne devrait pas avoir besoin d'être utilisée.") -AP.add_argument('--run', action='store_const', dest='action', const='run', - help=u"Lancer la fonction spécifiée avec l'option --func") -AP.add_argument('--dump', action='store_const', dest='action', const='dump', - help=u"Afficher le contenu de la base de données") -AP.add_argument('args', nargs='*') -o = AP.parse_args() - -# charger les modules -MODULES = {} -for module in o.modules: - MODULES[module] = __import__('%s_module' % module, globals()) - -# charger la configuration -config = o.config -if config is not None and ('/' in config or path.isfile(config)): - deploydb_path = [path.abspath(path.dirname(config))] -else: - deploydb_path = [USER_CONFDIR, SYSTEM_CONFDIR, SCRIPTDIR] - cname = config if config is not None else DEFAULT_CONFIG - config = find_in_path(cname, deploydb_path) - if config is None and not o.missing_ok: - raise ValueError("Impossible de trouver le fichier de configuration %s" % cname) - -catalog.create_object('deploydb', 'path', dir=deploydb_path) -Parser(config) - -dd_path = catalog.get('deploydb', 'path') -dd_include = catalog.get('deploydb', 'include', None, create=False) -if dd_include is not None: - included = set([config]) - while True: - done = True - for file in dd_include.get('file', ()): - # cette valeur peut changer au fur et à mesure que les fichiers sont - # inclus. la recharger systématiquement - deploydb_path = dd_path.get('dir', ()) - pf = find_in_path(file, deploydb_path) - if pf in included: continue - included.add(pf) - if pf is not None: - Parser(pf) - # il y a peut-être de nouveaux fichiers à inclure. 
configurer - # une nouvelle itération - done = False - else: - log.warning("deploydb:include: %s: Fichier introuvable", file) - if done: break - -deploydb_path = dd_path.get('dir', ()) -dd_loadcsv = catalog.get('deploydb', 'loadcsv', None, create=False) -if dd_loadcsv is not None: - otype = dd_loadcsv.first('otype', None) - otype_col = dd_loadcsv.first('otype_col', 'otype') - oid_col = dd_loadcsv.first('oid_col', 'oid') - for file in dd_loadcsv.get('file', ()): - pf = find_in_path(file, deploydb_path) - if pf is not None: - catalog.load_csv(pf, otype, otype_col, oid_col) - else: - log.warning("deploydb:loadcsv: %s: Fichier introuvable", file) - -# actions -if o.resolve: catalog.resolve() - -if o.action == 'run': - names = o.func.split('.') - func = MODULES[names[0]] - for name in names[1:]: - func = getattr(func, name) - func(*o.args) - -elif o.action == 'dump': - catalog.dump() diff --git a/lib/nulib/python/deploydb/base_module.py b/lib/nulib/python/deploydb/base_module.py deleted file mode 100644 index b63c86d..0000000 --- a/lib/nulib/python/deploydb/base_module.py +++ /dev/null @@ -1,239 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'Deploydb', - 'Host', - 'host_matcher', 'hostname_matcher', -) - -import logging; log = logging.getLogger(__name__) -import os, sys, socket, csv -from os import path - -from .utils import * -from .objects import XT, fileP, pathP, lowerP, Object, catalog - -################################################################################ -# Configuration de deploydb - -class Deploydb(Object): - ATTRS = XT(Object, dir=pathP, file=fileP) - -################################################################################ -# Gestion des groupes - -class Group(Object): - """Groupe d'objets liés. - - Lors du resolve, toutes les variables définies pour le groupe sont propagées - aux objets liés si elles ne sont pas définies dans ces objets. - - dans l'exemple suivant: - ~~~ - group mymodules shared=all - -module module1,module2 - moduledir=~/wop/php - -host host1,host2 - domain=long.tld - ~~~ - la variable shared est initialisée pour module1,module2,host1,host2 alors que - la variable moduledir ne concerne que module1,module2 et la variable domain ne - concerne que host1,host2 - """ - - __RESOLVE_FIRST__ = True - - ATTRS = XT(Object) - - def _resolve(self, catalog): - for otype, links in self.links.items(): - for link in links: - object = link.resolve(catalog, resolve=False) - object.set_defaults(link.attrs) - object.set_defaults(self.attrs) - -################################################################################ -# Gestion des hôtes - -def withdomain(h): return '.' 
in h - -def fix_host(host, domain=None): - if host.endswith('.'): - host = host[:-1] - elif domain and not withdomain(host): - host = "%s.%s" % (host, domain) - return host -def strip_hostname(a): - pos = a.find('.') - if pos == -1: return None - else: return a[pos + 1:] -def strip_domain(a): - pos = a.find('.') - if pos == -1: return a - else: return a[:pos] - -def match_host(qhost, object): - qhost = lowerP.parse(qhost) - if withdomain(qhost): # host avec chemin - qhost = fix_host(qhost) - return qhost in object.get('host', ()) - else: # nom de host - return qhost in object.get('hostname', ()) -def host_matcher(qhost): - return lambda object: match_host(qhost, object) - -def match_hostname(qhost, object): - qhost = lowerP.parse(qhost) - qhost = path.basename(qhost) - return qhost in object.get('hostname', ()) -def hostname_matcher(qhost): - return lambda object: match_hostname(qhost, object) - -class Host(Object): - ATTRS = XT(Object, - values=lowerP, - host=lowerP, hostname=lowerP, domain=lowerP, ip=None) - - def _resolve(self, catalog): - if self.oid == '*': return - default = catalog.get(self.otype, '*', None, False) - - hosts = self.get('host', []) - hostnames = self.get('hostname', ()) - domains = self.get('domain', ()) - - search_basedir = self.get('search_basedir', ('dirs',)) - files = 'files' in search_basedir - dirs = 'dirs' in search_basedir - basedir = self.get('basedir', None) - if basedir is not None: - hostdirs = self.resolve_basedir(basedir, files=files, dirs=dirs) - hosts.extend(map(path.basename, hostdirs)) - dirspec = self.get('dirspec', None) - if dirspec is not None: - hostdirs = self.resolve_filespec(dirspec, dirs=True) - hosts.extend(map(path.basename, hostdirs)) - filespec = self.get('filespec', None) - if filespec is not None: - hostfiles = self.resolve_filespec(filespec, files=True) - hosts.extend(map(path.basename, hostfiles)) - - if hosts: - # générer hostname et domain à partir host - if not domains: - domains = set(map(strip_hostname, hosts)) - domains = filter(lambda d: d is not None, domains) - if not domains and default is not None: - domains = default.get('domain', ()) - domains = filter(None, domains) - if domains: domains = self.domain = set(domains) - - hostnames = map(strip_domain, hostnames or hosts) - if hostnames: hostnames = self.hostname = set(hostnames) - - if domains: - tmphosts = [] - for host in hosts: - for domain in domains: - tmphosts.append(fix_host(host, domain)) - else: - tmphosts = map(fix_host, hosts) - hosts = self.host = set(tmphosts) - - else: - # générer host à partir de hostname et domain - if not domains and default is not None: - domains = default.get('domain', ()) - if domains: domains = self.domain = set(domains) - - if not hostnames: hostnames = [self.oid] - hostnames = map(strip_domain, hostnames) - if hostnames: self.hostname = hostnames - - if domains: - hosts = [] - for domain in domains: - for hostname in hostnames: - hosts.append('%s.%s' % (hostname, domain)) - else: - hosts = hostnames - if hosts: hosts = self.host = set(hosts) - - ips = self.get('ip', []) - if not ips: - for host in hosts: - try: - hostnames, aliases, ipaddrs = socket.gethostbyname_ex(host) - ips.extend(ipaddrs) - except socket.herror, e: - log.error("error resolving %s: %s, %s", host, e[0], e[1]) - except socket.gaierror, e: - log.error("error resolving %s: %s, %s", host, e[0], e[1]) - if ips: ips = self.ip = set(ips) - - if not self.values: - self.values = hosts - -def save_hosts(*args): - """Ecrire les hôtes définis sous forme de liste csv, qu'il 
est possible - d'exploiter avec 'deploydb loadcsv' - - plus ou moins équivalent à `save_objects host` mais les champs sont dans un - ordre ergonomique (cette fonction a été écrite en premier, elle est gardée - pour l'historique) - """ - # tout d'abord déterminer tous les attributs nécessaires - headers = ['host', 'hostname', 'domain', 'ip'] - hosts = catalog.find_objects('host') - for host in hosts: - for name in host.attrs.keys(): - if name not in headers: headers.append(name) - # ensuite les écrire - rows = [] - for host in hosts: - if host.oid == '*': continue - row = [host.otype, host.oid] - for name in headers: - row.append(';'.join(host.get(name, ()))) - rows.append(row) - headers[0:0] = ['otype', 'oid'] - # écrire le résultat - out = csv.writer(sys.stdout) - out.writerow(headers) - out.writerows(rows) - -################################################################################ -# Actions - -def save_objects(*args): - """Ecrire les objets sous forme de liste csv, qu'il est possible d'exploiter - avec 'deploydb loadcsv' - - usage: save_objects [otype [oids...]] - """ - otypes = listof(args[0] if args[0:1] else None, None) - if otypes is not None: otypes = flattenstr(otypes) - oids = args[1:] or None - objects = catalog.find_objects(otypes, oids, create=False) - # tout d'abord déterminer tous les attributs nécessaires - headers = ['otype', 'oid'] - for object in objects: - for name in object.known_rw_attrs: - if name not in headers: headers.append(name) - for object in objects: - for name in object.misc_attrs: - if name not in headers: headers.append(name) - # ensuite les écrire - rows = [] - for object in objects: - row = [] - for name in headers: - row.append(';'.join(object.get(name, ()))) - rows.append(row) - # écrire le résultat - out = csv.writer(sys.stdout) - out.writerow(headers) - out.writerows(rows) - -def query(*args): - pass diff --git a/lib/nulib/python/deploydb/expr.py b/lib/nulib/python/deploydb/expr.py deleted file mode 100644 index 45c5005..0000000 --- a/lib/nulib/python/deploydb/expr.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'ANY', 'ALL', 'NONE', 'EXISTS', -) - -from .utils import * - -class Expr(object): - _terms, terms = None, property(lambda self: self._terms) - - def __init__(self, *terms): - self._terms = listof(terms or None, None) - - @staticmethod - def match_dict(dict, object): - for name, value in dict.items(): - one_match = False - attr_values = object.get(name, ()) - for value in listof(value): - if value in attr_values: - one_match = True - break - if not one_match: return False - return True - - @staticmethod - def match_term(term, object): - """tester le terme par rapport à l'objet. - * si c'est None, retourner vrai - * si c'est un dictionnaire, tous les attributs cités doivent avoir au - moins une des valeurs fournies - * si c'est une fonction (plus exactement un objet appelable), elle doit - prendre l'unique argument (object) et retourner True si l'objet - correspond - * si c'est une liste, la traiter comme ANY(*term) - * si c'est une instance de Expr, déléguer le traitement à sa méthode - match() - * sinon, lancer une exception. 
- """ - if term is None: - return True - elif isinstance(term, dict): - return Expr.match_dict(term, object) - elif callable(term): - return term(object) - elif isseq(term): - term = ANY(*term) - if isinstance(term, Expr): - return term.match(object) - else: - raise ValueError("Argument invalide %r" % term) - - def match(self, object): - return False - -class ANY(Expr): - """construire l'objet avec une liste de termes. au moins un des termes doit - correspondre - """ - def match(self, object, lazy=True): - result = False - if self.terms is None: return result - for term in self.terms: - if self.match_term(term, object): - result = True - if lazy: break - return result - -class ALL(Expr): - """construire l'objet avec une liste de termes. tous les termes doivent - correspondrent - """ - def match(self, object, lazy=True): - result = True - if self.terms is None: return result - for term in self.terms: - if not self.match_term(term, object): - result = False - if lazy: break - return result - -class NONE(Expr): - """construire l'objet avec une liste de termes. aucun des termes ne doit - correspondre - """ - def match(self, object, lazy=False): - result = True - if self.terms is None: return result - for term in self.terms: - if self.match_term(term, object): - result = False - if lazy: break - return result - -class EXISTS(Expr): - """construire l'objet avec une liste d'attributs. tous les attributs doivent - exister - """ - def match(self, object, lazy=True): - result = True - if self.terms is None: return result - for term in self.terms: - if not object.has_key(term): - result = False - if lazy: break - return result diff --git a/lib/nulib/python/deploydb/lexer.py b/lib/nulib/python/deploydb/lexer.py deleted file mode 100644 index e8fcc49..0000000 --- a/lib/nulib/python/deploydb/lexer.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Lexer pour un fichier de configuration - -Syntaxe: -~~~ -# comment -object id var=value - continuation="line starting with a space" - -link otherid - --link objectref1 predicate -link objectref2 -~~~ -""" - -__all__ = ( - 'Lexer', -) - -import re - -class EOL(object): - """fin de ligne""" - __repr__ = __string__ = lambda self: 'EOL' -EOL = EOL() -class CONTL(object): - """ligne de continuation""" - __repr__ = __string__ = lambda self: 'CONTL' -CONTL = CONTL() -class EOF(object): - """fin de fichier""" - __repr__ = __string__ = lambda self: 'EOF' -EOF = EOF() - -class Lexer(object): - file = None - lexems = None - _inf = None - _lcount = None - _line = None - - def __init__(self, file, parse=True): - self.file = file - if parse: self.parse() - - def next_line(self): - line = self._inf.readline() - if line == '': return None - if line.endswith("\r\n"): line = line[:-2] - elif line.endswith("\n"): line = line[:-1] - elif line.endswith("\r"): line = line[:-1] - self._lcount += 1 - self._line = line - return line - - def is_empty(self): return self._line == '' - def isa_comment(self): return self._line[:1] == '#' - def isa_squote(self): return self._line[:1] == "'" - def isa_dquote(self): return self._line[:1] == '"' - - RE_SPACE = re.compile(r'\s+') - RE_COMMENT = re.compile(r'#.*') - def parse_ws(self): - # c'est une ligne de continuation si elle commence par des espaces et ne - # rencontre pas de commentaire - contl = False - mo = self.RE_SPACE.match(self._line) - if mo is not None: - self._line = self._line[mo.end(0):] - contl = True - mo = self.RE_COMMENT.match(self._line) - if mo is not None: 
- self._line = self._line[mo.end(0):] - contl = False - return contl - def isa_space(self): return self.RE_SPACE.match(self._line) is not None - def isa_comment(self): return self.RE_COMMENT.match(self._line) is not None - - RE_SQUOTE = re.compile(r"'") - def parse_sstring(self): - slos = self._lcount - lexem = '' - self._line = self._line[1:] - mo = self.RE_SQUOTE.search(self._line) - while mo is None: - lexem += self._line - if self.next_line() is None: - raise ValueError("unterminated quoted string starting at line %i" % slos) - lexem += "\n" - mo = self.RE_SQUOTE.search(self._line) - lexem += self._line[0:mo.start(0)] - self._line = self._line[mo.end(0):] - return lexem - - RE_DQUOTE = re.compile(r'"') - def parse_dstring(self): - slos = self._lcount - lexem = '' - self._line = self._line[1:] - mo = self.RE_DQUOTE.search(self._line) - while mo is None: - lexem += self._line - if self.next_line() is None: - raise ValueError("unterminated double-quoted string starting at line %i" % slos) - lexem += "\n" - mo = self.RE_DQUOTE.search(self._line) - lexem += self._line[0:mo.start(0)] - self._line = self._line[mo.end(0):] - lexem = lexem.replace('\\"', '"') - lexem = lexem.replace("\\'", "'") - lexem = lexem.replace('\\\\', '\\') - return lexem - - RE_EOS = re.compile(r'''\s|(? oclass - _t2cmap, t2cmap = None, property(lambda self: self._t2cmap) - # map otype --> id2object - _t2iomap, t2iomap = None, property(lambda self: self._t2iomap) - - # liste d'instance de facts - _facts, facts = None, property(lambda self: self._facts) - - # types d'objets qui doivent être résolus en premier - _rf_otypes = None - - def __init__(self): - self._t2cmap = {} - self._t2iomap = {} - self._facts = [] - self._rf_otypes = set() - - def register(self, oclass, otype=None, resolve_first=False): - if otype is None: - otype = oclass.__name__.lower() - self._t2cmap[otype] = oclass - self._rf_otypes.add(otype) - - ############################################################################ - # Création - - def create_object(self, otype, oid, *values, **attrs): - """créer un nouvel objet du type spécifié ou le mettre à jour - """ - if isinstance(otype, type): - oclass = otype - otype = otype.TYPE - else: - oclass = self._t2cmap.get(otype, None) - if not self._t2iomap.has_key(otype): - self._t2iomap[otype] = {} - i2omap = self._t2iomap[otype] - if not i2omap.has_key(oid): - if oclass is None: - object = Object(oid, *values, **attrs) - object.otype = otype - else: - object = oclass(oid, *values, **attrs) - i2omap[oid] = object - else: - object = i2omap[oid] - if values: object.update('values', values) - if attrs: object.update(attrs) - return object - - def create_fact(self, sotype, soid, verb, totype, toid, **attrs): - """créer un nouveau fait entre les deux objets spécifiés - """ - if isinstance(sotype, type): sotype = sotype.TYPE - if isinstance(totype, type): totype = totype.TYPE - fact = Fact(sotype, soid, verb, totype, toid, **attrs) - self._facts.append(fact) - return fact - - def resolve(self): - """résoudre tous les objets et tous les faits - """ - rf_otypes = self._rf_otypes - # d'abord résoudre les types d'objets mentionnés dans rf_otypes - for otype in rf_otypes: - i2omap = self.t2iomap.get(otype, None) - if i2omap is None: continue - for id, object in i2omap.items(): - object.resolve(self) - # puis résoudre les autres types d'objets - for otype, i2omap in self.t2iomap.items(): - if otype in rf_otypes: continue - for id, object in i2omap.items(): - object.resolve(self) - # puis résoudre tous les faits - for 
fact in self.facts: - fact.resolve(self) - return self - - ############################################################################ - # Consultation - - def get(self, otype, oid, default=_RAISE_EXCEPTION, create=True, resolve=True): - """obtenir un objet par son type et son identifiant - - par défaut, le créer s'il n'existe pas. avec create=True, l'argument - default est ignoré. - - si create=False, default indique la valeur à retourner. lancer une - exception ValueError si default=_RAISE_EXCEPTION (c'est la valeur par - défaut) - """ - object = None - i2omap = self._t2iomap.get(otype, None) - if i2omap is not None: object = i2omap.get(oid, None) - if object is None and not create: - if default is _RAISE_EXCEPTION: - raise ValueError("%s:%s: not found" % (otype, oid)) - else: - return default - if object is None: - object = self.create_object(otype, oid) - if resolve: - object.resolve(self) - return object - - ############################################################################ - # Recherches - - def find_tobjects(self, totype, objects, create=True, resolve=True): - """trouver les objets liés de type totype dans la objects - """ - objects = listof(objects) - if totype is not None: - # mettre dans un dictionnaire et indexer sur oid pour éviter les - # doublons - tobjects = {} - for object in objects: - if object.otype == totype: - tobjects[object.oid] = object - else: - lobjects = [link.resolve(self, None, create, resolve) for link in object.get_links(totype)] - for lobject in lobjects: - if lobject is None: continue - tobjects[lobject.oid] = lobject - objects = tobjects.values() - return objects - - def filter_objects(self, expr, objects): - """ne garder dans la liste objects que les objets qui correspondent à - l'expression. - """ - objects = listof(objects) - return [object for object in objects if Expr.match_term(expr, object)] - - def find_objects(self, otype=None, oid=None, - totype=None, expr=None, - create=True, resolve=True): - """chercher les objets correspondant à otype et/ou oid - - si totype!=None, alors chercher les objets liés qui sont de ce type - """ - otypes = listof(otype, None) - oids = listof(oid, None) - if otypes is not None and oids is not None: - objects = [] - for otype in otypes: - i2omap = self.t2iomap.get(otype, {}) - objects.extend([object for object in i2omap.values() if object.oid in oids]) - elif otypes is not None and oids is None: - objects = [] - for otype in otypes: - i2omap = self.t2iomap.get(otype, {}) - objects.extend(i2omap.values()) - elif oids is not None and otypes is None: - objects = [] - for otype, i2omap in self.t2iomap.items(): - objects.extend([object for object in i2omap.values() if object.oid in oids]) - else: - objects = [] - for otype, i2omap in self.t2iomap.items(): - objects.extend(i2omap.values()) - if resolve: - map(lambda object: object.resolve(self), objects) - objects = self.find_tobjects(totype, objects, create, resolve) - if expr is not None: - objects = self.filter_objects(expr, objects) - return objects - - def filter_facts(self, expr, facts): - """ne garder dans la liste facts que les faits qui correspondent à l'expression - """ - facts = listof(facts) - return [(fact, tsobjects, ttobjects) - for (fact, tsobjects, ttobjects) in facts - if Expr.match_term(expr, fact)] - - def find_facts(self, sotype=None, soid=None, verb=None, totype=None, toid=None, - tsotype=None, tsexpr=None, - ttotype=None, ttexpr=None, - expr=None, - resolve=True): - """chercher les faits correspondant aux arguments - - retourner une liste de 
tuples (fact, tsobjects, ttobjects) où - * fact est le fait original - * tsobjects sont les objets sources liés si tsotype et tsexpr sont - spécifiés - * ttobjects sont les objets destination liés si ttotype et ttexpr sont - spécifiés - """ - sotypes = listof(sotype, None) - soids = listof(soid, None) - verbs = listof(verb, None) - totypes = listof(totype, None) - toids = listof(toid, None) - facts = [] - for fact in self.facts: - if sotypes is not None and fact.sotype not in sotypes: - continue - if soids is not None and fact.soid not in soids: - continue - if verbs is not None and fact.verb not in verbs: - continue - if totypes is not None and fact.totype not in totypes: - continue - if toids is not None and fact.toid not in toids: - continue - tsobjects = [fact.sresolve(self, None, True)] - ttobjects = [fact.tresolve(self, None, True)] - if tsotype is not None: - # chercher les objets liés dans la source - tsobjects = self.filter_objects(tsexpr, self.find_tobjects(tsotype, tsobjects)) - if not tsobjects: continue - if ttotype is not None: - # chercher les objets liés dans la source - ttobjects = self.filter_objects(ttexpr, self.find_tobjects(ttotype, ttobjects)) - if not ttobjects: continue - facts.append((fact, tsobjects, ttobjects)) - if resolve: - for fact, tsobjects, ttobjects in facts: - fact.resolve(self) - if expr is not None: - facts = self.filter_facts(expr, facts) - return facts - - ############################################################################ - # Divers - - def dump(self): - self.resolve() - for otype, i2omap in self.t2iomap.items(): - print "OBJECTS:%s:" % otype - for id, object in i2omap.items(): - object.dump(" ") - if self.facts: - print "FACTS:" - for fact in self.facts: - fact.dump(" ") - -################################################################################ -# liens - -class Link(object): - """Un lien vers une référence d'un objet - - Un lien a le type de l'objet cible (propriété `otype`), son identifiant - (propriété `oid`), et des attributs multivalués (toutes les autres - propriétés) - """ - - ATTRS = dict(otype=None, oid=None, attrs=None) - - _rw_attrs = set(('otype', 'oid')) - _ro_attrs = set(('attrs',)) - _reserved_attrs = _rw_attrs | _ro_attrs - - _otype = None - _oid = None - _attrs = None - - def __init__(self, otype=None, oid=None, **attrs): - self.__dict__['_otype'] = otype - self.__dict__['_oid'] = oid - self.__dict__['_attrs'] = {} - for attr, value in attrs.items(): - self.update(attr, value) - - def __parse(self, attr, value): - """obtenir le parser qui permet de s'assurer que value est dans le bon - format pour l'attribut attr. 
- """ - if isindex(attr): parser = None - else: parser = self.ATTRS.get(attr, None) - if parser is None: return value - elif isseq(value): return flattenseq(map(parser.parse, value)) - else: return parser.parse(value) - - # accès aux attributs - def __getattr__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - try: - return self._attrs[attr] - except KeyError: - raise AttributeError(attr) - def __setattr__(self, attr, value): - value = self.__parse(attr, value) - if attr in self._rw_attrs: - return super(Link, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise AttributeError(attr) - else: - self._attrs[attr] = listof(value) - def __delattr__(self, attr): - if attr in self._reserved_attrs: - raise AttributeError(attr) - try: - del self._attrs[attr] - except KeyError: - raise AttributeError(attr) - def __getitem__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - else: - return self._attrs[attr] - def __setitem__(self, attr, value): - value = self.__parse(attr, value) - if attr in self._rw_attrs: - return super(Link, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise KeyError(attr) - else: - self._attrs[attr] = listof(value) - def __delitem__(self, attr): - if attr in self._reserved_attrs: - raise KeyError(attr) - else: - del self._attrs[attr] - - def first(self, attr, default=None): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - if self._attrs.has_key(attr): - values = self._attrs[attr] - if values: return values[0] - return default - def get(self, attr, default=None): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - else: - return self._attrs.get(attr, default) - def has_key(self, attr): - """tester l'existence d'un attribut""" - if attr in self._reserved_attrs: - return True - else: - return self._attrs.has_key(attr) - @property - def known_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma""" - return sorted(list(self.ATTRS.keys())) - @property - def misc_attrs(self): - """obtenir une liste triée d'attributs ne faisant pas partie du schéma""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(defined_attrs - schema_attrs)) - @property - def missing_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma mais non définis""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(schema_attrs - defined_attrs - self._reserved_attrs)) - def update(self, attr, value=None, update_type=ADD_UNIQUE): - """mettre à jour l'attribut spécifié - - si l'attribut n'existe pas, il est créé. sinon, la liste des valeurs de - l'attribut est étendue. - - si value==None, aucune mise à jour n'est effectuée - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés. 
- - update_type est la méthode de mise à jour - """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.update(attr, value, update_type) - return self - if attr in self._reserved_attrs: - raise KeyError(attr) - if value is not None: - values = listof(self.__parse(attr, value)) - if not self._attrs.has_key(attr): - self._attrs[attr] = [] - attr = self._attrs[attr] - if update_type is ADD_UNIQUE: - for value in values: - if value not in attr: - attr.append(value) - elif update_type is ADD: - attr.extend(values) - elif update_type is REMOVE: - for value in values: - if value in attr: - attr.remove(value) - elif update_type is RESET_ADD: - attr[:] = values - return self - def set_defaults(self, attr, value=None, update_type=ADD_UNIQUE): - """Mettre à jour l'attribut spécifié s'il n'existe pas - - si value==None, aucune mise à jour n'est effectuée - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés s'ils n'existent pas. - """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.set_defaults(attr, value, update_type) - return self - if attr in self._reserved_attrs: - raise KeyError(attr) - if not self._attrs.has_key(attr): - self.update(attr, value, update_type) - return self - - def clone(self): - """cloner ce lien""" - return self.__class__(self._otype, self._oid, **self._attrs) - - # catalogue - def resolve(self, catalog, default=_RAISE_EXCEPTION, create=True, resolve=True): - """obtenir l'objet lié - """ - return catalog.get(self.otype, self.oid, default, create, resolve) - - # divers - def _dump_idtype(self, indent, prefix=None): - if prefix is None: prefix = '' - else: prefix = "%s " % prefix - print "%s%s%s:%s" % (indent, prefix, self._otype, self._oid) - def _dump_attrs(self, indent): - attrs = self._attrs - missing_attrs = self.missing_attrs - if attrs or missing_attrs: - print "%s attrs:" % indent - for name, values in attrs.items(): - if len(values) == 1: - print "%s %s=%s" % (indent, name, repr(values[0])) - else: - print "%s %s=(%s)" % (indent, name, ', '.join(map(repr, values))) - for name in missing_attrs: - print "%s %s=" % (indent, name) - def dump(self, indent='', prefix=None): - """Afficher l'identifiant, le type et les attributs de ce lien - """ - self._dump_idtype(indent, prefix) - self._dump_attrs(indent) - -################################################################################ -# objets - -class MetaObject(type): - def __init__(cls, name, bases, attrs): - type.__init__(cls, name, bases, attrs) - if cls.__dict__.get('TYPE', None) is None: - cls.TYPE = cls.__name__.lower() - register = not cls.__dict__.get('__NO_AUTO_REGISTER__', False) - resolve_first = cls.__dict__.get('__RESOLVE_FIRST__', False) - if register: - catalog.register(cls, cls.TYPE, resolve_first) - -class Object(object): - """Un objet générique - - Un objet a un identifiant (propriété `oid`), un type (propriété `otype`), une - liste de valeurs (propriété `values`), des liens vers d'autres objets - (propriété `links`) et des attributs multivalués (toutes les autres propriétés). - - Le type de l'objet définit un schéma, c'est à dire un ensemble d'attributs - spécifiques avec des valeurs par défaut. 
Les attributs du schéma sont les - attributs connus (propriété known_attrs), les autres sont les attributs - divers (propriété misc_attrs) - """ - - __metaclass__ = MetaObject - __NO_AUTO_REGISTER__ = True - __RESOLVE_FIRST__ = False - - ATTRS = dict(otype=None, oid=None, values=None, attrs=None, links=None) - TYPE = 'object' - - _rw_attrs = set(('otype', 'oid')) - _ro_attrs = set(('values', 'attrs', 'links')) - _reserved_attrs = _rw_attrs | _ro_attrs - - _otype = None - _oid = None - _values = None - _attrs = None - _links = None - - _resolved = None - - def __init__(self, oid=None, *values, **attrs): - self.__dict__['_otype'] = self.TYPE - self.__dict__['_oid'] = oid - self.__dict__['_values'] = [] - self.__dict__['_attrs'] = {} - self.__dict__['_links'] = {} - self.__dict__['_resolved'] = False - self.update('values', values) - for attr, value in attrs.items(): - self.update(attr, value) - - def __parse(self, attr, value): - """obtenir le parser qui permet de s'assurer que value est dans le bon - format pour l'attribut attr. Utiliser attr==None pour l'attribut values - """ - if isindex(attr): attr = 'values' - parser = self.ATTRS.get(attr, None) - if parser is None: return value - elif isseq(value): return flattenseq(map(parser.parse, value)) - else: return parser.parse(value) - - # accès aux valeurs (via un index numérique) et aux attributs (via le nom de - # l'attribut) - def __getattr__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - try: - if isindex(attr): return self._values[attr] - else: return self._attrs[attr] - except KeyError: - raise AttributeError(attr) - def __setattr__(self, attr, value): - value = self.__parse(attr, value) - if attr == 'values': - self._values[:] = listof(value) - elif attr in self._rw_attrs: - super(Object, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise AttributeError(attr) - elif attr in self.__dict__: - super(Object, self).__setattr__(attr, value) - elif isindex(attr): - self._values[attr] = value - else: - self._attrs[attr] = listof(value) - self.__dict__['_resolved'] = False - def __delattr__(self, attr): - if attr in self._reserved_attrs: - raise AttributeError(attr) - try: - if isindex(attr): del self._values[attr] - else: del self._attrs[attr] - except KeyError: - raise AttributeError(attr) - self.__dict__['_resolved'] = False - def __getitem__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - elif isindex(attr): - return self._values[attr] - else: - return self._attrs[attr] - def __setitem__(self, attr, value): - value = self.__parse(attr, value) - if attr == 'values': - self._values[:] = listof(value) - elif attr in self._rw_attrs: - return super(Object, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise KeyError(attr) - elif isindex(attr): - self._values[attr] = value - else: - self._attrs[attr] = listof(value) - self.__dict__['_resolved'] = False - def __delitem__(self, attr): - if attr in self._reserved_attrs: - raise KeyError(attr) - elif isindex(attr): - del self._values[attr] - else: - del self._attrs[attr] - self.__dict__['_resolved'] = False - - # accès spécifique aux valeurs - __nonzero__ = lambda self: True - def __len__(self): - """obtenir le nombre de valeurs""" - return len(self._values) - def __iter__(self): - """obtenir un itérateur sur les valeurs""" - return self._values.__iter__() - def __reversed__(self): - """obtenir la liste des valeurs inversée""" - return self._values.__reversed__() - def 
__contains__(self, item): - """tester l'existence d'une valeur""" - return item in self._values - def append(self, value): - """ajouter une valeur""" - return self._values.append(value) - def insert(self, index, value): - """insérer une valeur à la position spécifiée""" - return self._values.insert(index, value) - def extend(self, seq): - """étendre la liste des valeurs""" - return self._values.extend(seq) - - # accès spécifique aux attributs - def first(self, attr, default=None): - """obtenir la première valeur de l'attribut""" - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - if self._attrs.has_key(attr): - values = self._attrs[attr] - if values: return values[0] - return default - def get(self, attr, default=None): - """obtenir l'attribut sous forme de liste""" - if attr in self._reserved_attrs: - return listof(getattr(self, '_%s' % attr)) - else: - return self._attrs.get(attr, default) - def has_key(self, attr): - """tester l'existence d'un attribut""" - if attr in self._reserved_attrs: - return True - else: - return self._attrs.has_key(attr) - @property - def known_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma""" - return sorted(list(self.ATTRS.keys())) - @property - def known_rw_attrs(self): - """obtenir une liste triée des attributs faisant partie du schéma accessibles en écriture""" - return sorted(list(set(self.ATTRS.keys()) - self._ro_attrs)) - @property - def misc_attrs(self): - """obtenir une liste triée d'attributs ne faisant pas partie du schéma""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(defined_attrs - schema_attrs)) - @property - def missing_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma mais non définis""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(schema_attrs - defined_attrs - self._reserved_attrs)) - def update(self, attr, value=None, update_type=ADD_UNIQUE): - """mettre à jour l'attribut spécifié - - si l'attribut n'existe pas, il est créé. sinon, la liste des valeurs de - l'attribut est étendue. - - si value==None, aucune mise à jour n'est effectuée - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés. - - update_type est la méthode de mise à jour - """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.update(attr, value, update_type) - return self - if attr == 'values': pass - elif attr in self._reserved_attrs: - raise KeyError(attr) - if value is not None: - values = listof(self.__parse(attr, value)) - if attr == 'values': - attr = self._values - else: - if not self._attrs.has_key(attr): self._attrs[attr] = [] - attr = self._attrs[attr] - if update_type is ADD_UNIQUE: - for value in values: - if value not in attr: - attr.append(value) - elif update_type is ADD: - attr.extend(values) - elif update_type is REMOVE: - for value in values: - if value in attr: - attr.remove(value) - elif update_type is RESET_ADD: - attr[:] = values - self.__dict__['_resolved'] = False - return self - def set_defaults(self, attr, value=None, update_type=ADD_UNIQUE): - """Mettre à jour l'attribut spécifié s'il n'existe pas - - si value==None, aucune mise à jour n'est effectuée - - utiliser attr==None pour mettre à jour l'attribut values - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés. 
- """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.set_defaults(attr, value, update_type) - return self - if attr == 'values': - if not self._values: - self.update('values', value, update_type) - elif attr in self._reserved_attrs: - raise KeyError(attr) - elif not self._attrs.has_key(attr): - self.update(attr, value, update_type) - return self - - def clone(self): - """cloner cet objet""" - o = self.__class__(self._oid, self._values, **self._attrs) - # XXX cloner aussi les liens - return o - - # gestion des liens - def linkto(self, loi, otype=None, **attrs): - """lier vers une référence d'un autre objet - - loi peut être: - * une instance de Link - * une instance d'Object - * un identifiant d'objet, auquel cas otype est requis - - @return l'instance du lien créé - """ - if isinstance(loi, Link): - create = False - link = loi.clone() - elif isinstance(loi, Object): - otype = loi.otype - oid = loi.oid - create = True - else: - if otype is None: raise ValueError('otype is required') - oid = loi - create = True - if create: - link = Link(otype, oid, **attrs) - else: - link.update(attrs) - links = self._links - if not links.has_key(link.otype): - links[otype] = [] - links[otype].append(link) - self.__dict__['_resolved'] = False - return link - - def get_links(self, otype=None, clone=False): - """retourner les liens vers les objets du type spécifié - - si otype==None, alors retourner tous les liens - - si clone==True, faire un clone des liens avant de les retourner - """ - if otype is None: - links = [] - for otype, tmplinks in self._links.items(): - links.extend(tmplinks) - else: - links = listof(self._links.get(otype, ())) - if clone: - links = [link.clone() for link in links] - return links - - # catalogue - def resolve_basedir(self, basedirs, dirs=False, files=False, - filespec=None, - dir_attr='dir', file_attr='file', - parentdir_attr='parentdir'): - """retourner les chemins absolus des fichiers (et/ou répertoires) trouvés dans - les répertoires basedirs - - si les arguments dir_attr, file_attr, parentdir_attr ne sont pas None - (ce qui est le cas par défaut), alors l'attribut est mis à jour avec - respectivement les répertoires, les fichiers, et les répertoires parent - trouvés - """ - filespecs = listof(filespec, None) - result = [] - for basedir in basedirs: - basedir = path.expanduser(basedir) - basedir = path.abspath(basedir) - for name in os.listdir(basedir): - if filespecs is not None: - found = False - for filespec in filespecs: - if fnmatch(name, filespec): - found = True - break - if not found: continue - pf = path.join(basedir, name) - if path.isdir(pf) and (dirs or dirs == files): - result.append(pf) - if dir_attr is not None: - self.update(dir_attr, pf) - elif path.isfile(pf) and (files or dirs == files): - result.append(pf) - if file_attr is not None: - self.update(file_attr, pf) - if parentdir_attr is not None: - self.update(parentdir_attr, map(path.dirname, result)) - return result - def resolve_filespec(self, filespecs, dirs=False, files=False, - dir_attr='dir', file_attr='file', - parentdir_attr='parentdir'): - """retourner les chemins absolus des fichiers (et/ou répertoires) correspondant - aux modèles filespecs (qui doivent être de type glob) - - si les arguments dir_attr, file_attr, parentdir_attr ne sont pas None - (ce qui est le cas par défaut), alors l'attribut est mis à jour avec - respectivement les répertoires, les fichiers, et les répertoires parent - trouvés - """ - result = [] - for filespec in filespecs: - filespec = 
path.expanduser(filespec) - for file in glob(filespec): - pf = path.abspath(file) - if path.isdir(pf) and (dirs or dirs == files): - result.append(pf) - if dir_attr is not None: - self.update(dir_attr, pf) - elif path.isfile(pf) and (files or dirs == files): - result.append(pf) - if file_attr is not None: - self.update(file_attr, pf) - if parentdir_attr is not None: - self.update(parentdir_attr, map(path.dirname, result)) - return result - - def _resolve(self, catalog): - """à surcharger dans les classes dérivées""" - values = [] - search_basedir = self.get('search_basedir', ()) - files = 'files' in search_basedir - dirs = 'dirs' in search_basedir - basedir = self.get('basedir', None) - if basedir is not None: - values.extend(self.resolve_basedir(basedir, files=files, dirs=dirs)) - dirspec = self.get('dirspec', None) - if dirspec is not None: - values.extend(self.resolve_filespec(dirspec, dirs=True)) - filespec = self.get('filespec', None) - if filespec is not None: - values.extend(self.resolve_filespec(filespec, files=True)) - if not self.values: - self.values = values - - def resolve(self, catalog, recursive=True): - """normaliser cet objet et compléter les données manquantes. si recursive==True - (la valeur par défaut), normaliser aussi les objets liés. - - @return True si l'objet a été modifié, False si l'objet avait déjà été résolu - """ - if self._resolved: return False - self._resolve(catalog) - if recursive: - for otype, links in self.links.items(): - for link in links: - link.resolve(catalog) - self.__dict__['_resolved'] = True - return True - - # divers - def _dump_idtype(self, indent): - print "%s%s:%s" % (indent, self._otype, self._oid) - def _dump_values(self, indent): - values = self._values - if len(values) == 0: - pass - elif len(values) == 1: - print "%s values=%s" % (indent, repr(values[0])) - else: - print "%s values=(%s)" % (indent, ', '.join(map(repr, values))) - def _dump_attrs(self, indent): - attrs = self._attrs - missing_attrs = self.missing_attrs - if attrs or missing_attrs: - print "%s attrs:" % indent - for name, values in attrs.items(): - if len(values) == 1: - print "%s %s=%s" % (indent, name, repr(values[0])) - else: - print "%s %s=(%s)" % (indent, name, ', '.join(map(repr, values))) - for name in missing_attrs: - print "%s %s=" % (indent, name) - def _dump_links(self, indent): - if self.links: - for ltype, links in self.links.items(): - for link in links: - link.dump("%s " % indent, '+->') - def dump(self, indent=''): - """Afficher l'identifiant, le type, les valeurs, les attributs et les liens de cet objet - """ - self._dump_idtype(indent) - self._dump_values(indent) - self._dump_attrs(indent) - self._dump_links(indent) - - def __repr__(self): - oid = repr(self._oid) - values = self._values - if values: values = ", %s" % ', '.join(map(repr, values)) - else: values = "" - attrs = self._attrs - if attrs: attrs = ", **%s" % repr(attrs) - else: attrs = "" - return "%s(%s%s%s)" % (self.__class__.__name__, oid, values, attrs) - -################################################################################ -# Faits - -class Fact(object): - """Un fait liant deux références d'objets - - Le fait a le type de l'objet source (propriété `sotype`), son identifiant - (propriété `soid`), le verbe décrivant le lien (propriété `verb`), le type - de l'objet cible (propriété `totype`), son identifiant (propriété `toid`), - et des attributs multivalués (toutes les autres propriétés) - """ - - ATTRS = dict( - sotype=None, soid=None, - verb=None, - totype=None, toid=None, - 
attrs=None, - ) - - _rw_attrs = set(('sotype', 'soid', 'verb', 'totype', 'toid')) - _ro_attrs = set(('attrs',)) - _reserved_attrs = _rw_attrs | _ro_attrs - - _sotype = None - _soid = None - _verb = None - _totype = None - _toid = None - _attrs = None - - def __init__(self, sotype=None, soid=None, verb=None, totype=None, toid=None, **attrs): - if verb.startswith('~'): - verb = verb[1:] - tmpotype, tmpoid = totype, toid - totype, toid = sotype, soid - sotype, soid = tmpotype, tmpoid - self.__dict__['_sotype'] = sotype - self.__dict__['_soid'] = soid - self.__dict__['_verb'] = verb - self.__dict__['_totype'] = totype - self.__dict__['_toid'] = toid - self.__dict__['_attrs'] = {} - for attr, value in attrs.items(): - self.update(attr, value) - - def __parse(self, attr, value): - """obtenir le parser qui permet de s'assurer que value est dans le bon - format pour l'attribut attr. - """ - if isindex(attr): parser = None - else: parser = self.ATTRS.get(attr, None) - if parser is None: return value - elif isseq(value): return flattenseq(map(parser.parse, value)) - else: return parser.parse(value) - - # accès aux attributs - def __getattr__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - try: - return self._attrs[attr] - except KeyError: - raise AttributeError(attr) - def __setattr__(self, attr, value): - value = self.__parse(attr, value) - if attr in self._rw_attrs: - return super(Link, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise AttributeError(attr) - else: - self._attrs[attr] = listof(value) - def __delattr__(self, attr): - if attr in self._reserved_attrs: - raise AttributeError(attr) - try: del self._attrs[attr] - except KeyError: raise AttributeError(attr) - def __getitem__(self, attr): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - else: - return self._attrs[attr] - def __setitem__(self, attr, value): - value = self.__parse(attr, value) - if attr in self._rw_attrs: - return super(Link, self).__setattr__('_%s' % attr, value) - elif attr in self._ro_attrs: - raise KeyError(attr) - else: - self._attrs[attr] = listof(value) - def __delitem__(self, attr): - if attr in self._reserved_attrs: raise KeyError(attr) - else: del self._attrs[attr] - - def first(self, attr, default=None): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - if self._attrs.has_key(attr): - values = self._attrs[attr] - if values: return values[0] - return default - def get(self, attr, default=None): - if attr in self._reserved_attrs: - return getattr(self, '_%s' % attr) - else: - return self._attrs.get(attr, default) - def has_key(self, attr): - """tester l'existence d'un attribut""" - if attr in self._reserved_attrs: - return True - else: - return self._attrs.has_key(attr) - @property - def known_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma""" - return sorted(list(self.ATTRS.keys())) - @property - def misc_attrs(self): - """obtenir une liste triée d'attributs ne faisant pas partie du schéma""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(defined_attrs - schema_attrs)) - @property - def missing_attrs(self): - """obtenir une liste triée d'attributs faisant partie du schéma mais non définis""" - schema_attrs = set(self.ATTRS.keys()) - defined_attrs = set(self._attrs.keys()) - return sorted(list(schema_attrs - defined_attrs - self._reserved_attrs)) - def update(self, attr, value=None, update_type=ADD_UNIQUE): - 
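The constructor above normalises inverted facts: a verb written with a leading '~' is stripped and the source and target references are swapped before the fact is stored. A small sketch of that convention, outside the class; the object types and ids are hypothetical.

~~~
def normalise_fact(sotype, soid, verb, totype, toid):
    """Mirror of the '~verb' handling in Fact.__init__: strip the '~'
    and swap the source and target references."""
    if verb.startswith('~'):
        verb = verb[1:]
        sotype, soid, totype, toid = totype, toid, sotype, soid
    return sotype, soid, verb, totype, toid

print(normalise_fact('webapp', 'myapp', '~runs', 'host', 'web1'))
# ('host', 'web1', 'runs', 'webapp', 'myapp')
~~~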
"""mettre à jour l'attribut spécifié - - si l'attribut n'existe pas, il est créé. sinon, la liste des valeurs de - l'attribut est étendue. - - si value==None, aucune mise à jour n'est effectuée - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés. - - update_type est la méthode de mise à jour - """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.update(attr, value, update_type) - return self - if attr in self._reserved_attrs: - raise KeyError(attr) - if value is not None: - values = listof(self.__parse(attr, value)) - if not self._attrs.has_key(attr): self._attrs[attr] = [] - attr = self._attrs[attr] - if update_type is ADD_UNIQUE: - for value in values: - if value not in attr: - attr.append(value) - elif update_type is ADD: - attr.extend(values) - elif update_type is REMOVE: - for value in values: - if value in attr: - attr.remove(value) - elif update_type is RESET_ADD: - attr[:] = values - return self - def set_defaults(self, attr, value=None, update_type=ADD_UNIQUE): - """Mettre à jour l'attribut spécifié s'il n'existe pas - - si value==None, aucune mise à jour n'est effectuée - - si attr est une instance de dictionnaire, mettre à jour *tous* les - attributs spécifiés s'ils n'existent pas. - """ - if isinstance(attr, dict): - attrs = attr - for attr, value in attrs.items(): - self.set_defaults(attr, value, update_type) - return self - if attr in self._reserved_attrs: - raise KeyError(attr) - if not self._attrs.has_key(attr): - self.update(attr, value, update_type) - return self - - def clone(self): - """cloner ce lien""" - return self.__class__(self._sotype, self._soid, self._verb, self._totype, self._toid, **self._attrs) - - # catalogue - def sresolve(self, catalog, default=_RAISE_EXCEPTION, create=True, resolve=True): - return catalog.get(self.sotype, self.soid, default, create, resolve) - - def tresolve(self, catalog, default=_RAISE_EXCEPTION, create=True, resolve=True): - return catalog.get(self.totype, self.toid, default, create, resolve) - - def resolve(self, catalog, default=_RAISE_EXCEPTION, create=True, resolve=True): - """obtenir les objets liés (source, verb, target) - """ - source = catalog.get(self.sotype, self.soid, default, create, resolve) - target = catalog.get(self.totype, self.toid, default, create, resolve) - return (source, self.verb, target) - - # divers - def _dump_idtype(self, indent, prefix=None): - if prefix is None: prefix = '' - else: prefix = "%s " % prefix - print "%s%s%s:%s %s %s:%s " % (indent, prefix, self._sotype, self._soid, self._verb, self._totype, self._toid) - def _dump_attrs(self, indent): - attrs = self._attrs - missing_attrs = self.missing_attrs - if attrs or missing_attrs: - print "%s attrs:" % indent - for name, values in attrs.items(): - if len(values) == 1: - print "%s %s=%s" % (indent, name, repr(values[0])) - else: - print "%s %s=(%s)" % (indent, name, ', '.join(map(repr, values))) - for name in missing_attrs: - print "%s %s=" % (indent, name) - def dump(self, indent='', prefix=None): - """Afficher l'identifiant, le type et les attributs de ce lien - """ - self._dump_idtype(indent, prefix) - self._dump_attrs(indent) - -################################################################################ -# variables globales - -catalog = Catalog() diff --git a/lib/nulib/python/deploydb/parser.py b/lib/nulib/python/deploydb/parser.py deleted file mode 100644 index 0ab2c51..0000000 --- a/lib/nulib/python/deploydb/parser.py +++ /dev/null @@ -1,216 +0,0 @@ -# -*- coding: 
utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Parser pour un fichier de configuration - -Syntaxe: -~~~ -# comment -object id var=value - continuation="line starting with a space" - -link otherid - --link objectref1 predicate -link objectref2 -~~~ -""" - -__all__ = ( - 'split_namev', 'split_nvalue', 'split_nlist', - 'Parser', -) - -import logging; log = logging.getLogger(__name__) -import sys, re - -from .utils import * -from .lexer import * -from .objects import * - -RE_NAMETYPE = re.compile(r'(\S+):(\w+)$') -RE_NAMEVALUE = re.compile(r'(\S+)=(\S*)') - -def split_namev(arg, sep=None): - """spliter un argument de la forme - name[method][=value] - - Si value n'est pas spécifié, il vaut None - method peut être % (reset), + (add), - (del) et sa valeur par défaut est ADD_UNIQUE - - si sep is not None, splitter values sur cette valeur - """ - if '=' in arg: - name, value = arg.split('=', 1) - if sep is not None: - value = value.split(sep) - else: - name = arg - value = None - if name.endswith('%'): - name = name[:-1] - method = RESET_ADD - elif name.endswith('+'): - name = name[:-1] - method = ADD - elif name.endswith('-'): - name = name[:-1] - method = REMOVE - else: - method = ADD_UNIQUE - mo = RE_NAMETYPE.match(name) - if mo is not None: - name, type = mo.groups() - else: - type = None - return name, value, method - -def split_nvalue(arg): - """spliter un argument de la forme - [name=]value - - Si name n'est pas spécifié, il vaut None - """ - if '=' in arg: - name, value = arg.split('=', 1) - else: - name = None - value = arg - return name, value - -def split_nlist(arg): - """spliter un argument de la forme - [name=]values - - Si name n'est pas spécifié, il vaut None - values est un ensemble de valeurs séparées par des virgules - """ - if '=' in arg: - name, values = arg.split('=', 1) - values = values.split(',') - else: - name = None - values = arg.split(',') - return name, values - -def split_list(arg): - """spliter un argument de la forme - values - - values est un ensemble de valeurs séparées par des virgules - """ - return arg.split(',') - -class LoneError(ValueError): - """Exception lancée quand un verbe attend un argument inexistant - """ - -class Parser(object): - args = None - - def __init__(self, file=None): - if file is not None: - self.parse(file=file) - - def parse(self, predicates=None, file=None): - if file is not None: - lexer = Lexer(file) - predicates = lexer.get_predicates() - for args in predicates: - self.args = args - if self.isa_link(): self.handle_fact() - else: self.handle_object() - return self - - def eop(self): - return not self.args - def isa_verb(self): - return self.args and not self.args[0].startswith('-') - def isa_link(self): - return self.args and self.args[0].startswith('-') - def isa_namevalue(self): - return self.args and RE_NAMEVALUE.match(self.args[0]) is not None - - def pop(self, desc=None): - arg = self.args.pop(0) - if desc is not None and self.eop(): - log.warning("lone %s '%s' was ignored", desc, arg) - raise LoneError(arg) - return arg - def pop_link(self, desc=None): - if not self.isa_link(): - raise ValueError("expected -otype") - return self.pop(desc)[1:] - def pop_namev(self, sep=None): - return split_namev(self.pop(), sep) - def pop_nvalue(self): - return split_nvalue(self.pop()) - def pop_nlist(self): - return split_nlist(self.pop()) - def pop_list(self): - return split_list(self.pop()) - - def handle_object(self): - # créer ou mettre à jour un objet - try: - otype = self.pop("object type") - if self.isa_namevalue(): - 
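Each attribute on a predicate uses the syntax name[method][=value], where the trailing marker on the name selects the update mode (% reset, + add, - remove, nothing for ADD_UNIQUE). A simplified sketch of that split follows; it ignores the name:type suffix handled by RE_NAMETYPE and returns the mode as a string for readability.

~~~
def split_namev_demo(arg, sep=None):
    """Simplified version of split_namev(): name[method][=value]."""
    if '=' in arg:
        name, value = arg.split('=', 1)
        if sep is not None:
            value = value.split(sep)
    else:
        name, value = arg, None
    method = {'%': 'RESET_ADD', '+': 'ADD', '-': 'REMOVE'}.get(name[-1:], 'ADD_UNIQUE')
    if method != 'ADD_UNIQUE':
        name = name[:-1]
    return name, value, method

print(split_namev_demo('profile=prod'))          # ('profile', 'prod', 'ADD_UNIQUE')
print(split_namev_demo('exclude+=*.log'))        # ('exclude', '*.log', 'ADD')
print(split_namev_demo('host%=web1,web2', ',')) # ('host', ['web1', 'web2'], 'RESET_ADD')
~~~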
oid, values, method = self.pop_namev(',') - else: - oid = self.pop() - values = () - method = ADD_UNIQUE - object = catalog.create_object(otype, oid) - object.update('values', values, method) - while not self.isa_link(): - if self.eop(): break - name, value, method = self.pop_namev() - if value is None: value = 'true' - object.update(name, value, method) - while self.isa_link(): - ltype = self.pop_link() - lids = self.pop_list() - links = [object.linkto(lid, ltype) for lid in lids] - while not self.isa_link(): - if self.eop(): break - name, value, method = self.pop_namev() - if value is None: value = 'true' - for link in links: - link.update(name, value, method) - except LoneError: - pass - - def handle_fact(self): - # créer un ou plusieurs liens - try: - sotype = self.pop_link("source link type") - soids = self.pop_list() - defo = Object() # pour les attributs par défaut - while self.isa_namevalue(): - name, value, method = self.pop_namev() - if value is None: value = 'true' - defo.update(name, value, method) - while not self.eop(): - verb = self.pop("verb") - totype = self.pop_link("dest link type") - toids = self.pop_list() - facts = [] - for soid in soids: - for toid in toids: - fact = catalog.create_fact(sotype, soid, verb, totype, toid) - fact.update(defo.attrs) - facts.append(fact) - while self.isa_namevalue(): - name, value, method = self.pop_namev() - if value is None: value = 'true' - for fact in facts: - fact.update(name, value, method) - except LoneError: - pass - - def parse_attrs(self, namevalues, object): - """analyser une liste de définition d'attributs name=value et mettre à jour - object - """ - self.args = listof(namevalues) - while not self.eop(): - name, value, method = self.pop_namev() - if value is None: value = 'true' - object.update(name, value, method) diff --git a/lib/nulib/python/deploydb/toinst_module.py b/lib/nulib/python/deploydb/toinst_module.py deleted file mode 100644 index d4b7a0e..0000000 --- a/lib/nulib/python/deploydb/toinst_module.py +++ /dev/null @@ -1,377 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'Webapp', - 'webapp_matcher', 'webappname_matcher', -) - -import logging; log = logging.getLogger(__name__) -import os, sys -from os import path - -from .utils import * -from .expr import * -from .objects import XT, fileP, pathP, mpathP, lowerP, Object, catalog -from .parser import Parser -from .base_module import withdomain, host_matcher, hostname_matcher - -################################################################################ -# Webapp - -def match_webapp(qwebapp, object): - if withpath(qwebapp): # webapp avec chemin - return qwebapp in object.get('webapp', ()) - else: # nom de webapp - return qwebapp in object.get('webappname', ()) -def webapp_matcher(qwebapp): - return lambda object: match_webapp(qwebapp, object) - -def match_webappname(qwebapp, object): - qwebapp = path.basename(qwebapp) - return qwebapp in object.get('webappname', ()) -def webappname_matcher(qwebapp): - return lambda object: match_webappname(qwebapp, object) - -class Webapp(Object): - ATTRS = XT(Object, - values=pathP, webapp=mpathP, webappdir=pathP, - basedir=pathP, dirspec=fileP) - - def _resolve(self, catalog): - if self.oid == '*': return - default = catalog.get(self.otype, '*', None, False) - - webapps = self.get('webapp', []) - basedir = self.get('basedir', None) - if basedir is not None: - webapps.extend(self.resolve_basedir(basedir, dirs=True)) - dirspec = self.get('dirspec', None) - if dirspec is not None: 
- webapps.extend(self.resolve_filespec(dirspec, dirs=True)) - - if webapps: - # générer webappdir et webappname à partir de webapp - webappdirs = [path.dirname(webapp) for webapp in webapps] - if webappdirs: webappdirs = self.webappdir = ulistof(webappdirs) - - webappnames = [path.basename(webapp) for webapp in webapps] - if webappnames: webappnames = self.webappname = ulistof(webappnames) - - else: - # générer webapps à partir de webappdir et webappname - webappdirs = self.get('webappdir', ()) - if not webappdirs and default is not None: - webappdirs = default.get('webappdir', ()) - if webappdirs: webappdirs = self.webappdir = ulistof(webappdirs) - - webappnames = self.get('webappname', ()) - if not webappnames: webappnames = [self.oid] - if webappnames: webappnames = self.webappname = ulistof(webappnames) - - if webappdirs: - webapps = [] - for webappname in webappnames: - found = [] - for webappdir in webappdirs: - webapp = path.join(webappdir, webappname) - if path.exists(webapp): - found.append(webapp) - break - if not found: - found = [path.join(webappdirs[0], webappname)] - webapps.extend(found) - else: - webapps = webappnames - if webapps: webapps = self.webapp = ulistof(webapps) - - if not self.values: - self.values = webapps - -################################################################################ -# Actions - -def option_choice(yesoption, nooption): - def func(value, *ignored): - if istrue(value): return yesoption - else: return nooption - return func - -def pffprofile_support(value, fact, webapp): - pffprofile = fact.get('pffprofile', None) - if pffprofile is None: pffprofile = webapp.get('pffprofile', None) - if pffprofile is None: return None - if value == 'ensure': - return ['--ensure-pffprofile', pffprofile[0]] - elif value == 'set': - return ['--set-pffprofile', pffprofile[0]] - -TOINST_ATTRS = { - 'tomcat_profile': dict( - option='--config-profile', - ), 'catalina_base': dict( - option='--catalina-base', - ), 'tomcat_user': dict( - option='--tomcat-user', - ), 'tomcat_group': dict( - option='--tomcat-group', - ), 'tomcat_version': dict( - option='--tomcat-version', - ), 'manager_url': dict( - option='--manager-url', - ), 'manager_user': dict( - option='--manager-user', - ), 'manager_password': dict( - option='--manager-password', - ), 'wamap': dict( - option='--wamap', - multiple=True, - flattensep=',', - ), 'exclude': dict( - option='--exclude', - multiple=True, - ), 'exclude_override': dict( - option='--replace-excludes', - multiple=True, - ), 'protect': dict( - option='--protect', - multiple=True, - ), 'rsync_option': dict( - option='--rsync-option', - multiple=True, - ), 'rsync_option_override': dict( - option='--replace-rsync-options', - multiple=True, - ), 'backup': dict( - func=option_choice('--backup', '--no-backup'), - ), 'restart': dict( - func=option_choice('--restart', '--no-restart'), - ), 'legacy_sort': dict( - func=option_choice('--legacy-sort', None), - ), 'pffprofile_support': dict( - func=pffprofile_support, - ), -} - -def query_rtoinst(*args): - """afficher la commande pour déployer avec la commande $1 la webapp $2 sur - l'hôte $3 dans le profil $4 ou le profil pff $5 - - $1 doit valoir 'rtoinst' ou être un chemin vers ce script - - $2 peut être - * un nom de webapp: toutes les webapps de ce nom sont sélectionnés - * un chemin complet: si une webapp avec le chemin complet est trouvée, ne - sélectinner que celle-là, sinon faire comme si on n'avait spécifié que le - nom de la webapp - * non spécifié: toutes les webapps devant être déployé sur 
l'hôte sont - cherchées - - $3 peut être - * un nom d'hôte: tous les hôtes de ce nom sont sélectionés - * un nom d'hôte pleinement qualifié: si le nom d'hôte pleinement qualifié - est trouvé, ne sélectionner que celui-là, sinon faire comme si on n'avait - spécifié que le nom d'hôte - * non spécifié: tous les hôtes vers lequel doit être déployé le webapp sont - cherchés - - $4 peut valoir - * 'NONE': seuls les déploiements sans profils définis sont sélectionnés. - c'est la valeur par défaut. - * 'ALL' ou '': ne pas tenir compte du profil lors de la sélection des - webapps et des hôtes - * toute autre valeur, e.g prod ou test: seuls les déploiements de ce profil - sont sélectionnés - Il est possible de spécifier plusieurs profils en les séparant par des - virgules. Par exemple, 'NONE,prod' permet de sélectionner les déploiements - sans profil ou dans le profil 'prod' - - $5 peut valoir - * 'NONE': seuls les déploiement sans profils pff définis sont sélectionnés. - * 'ALL' ou '': ne pas tenir compte du profil pff lors de la sélection des - webapps et des hôtes. c'est la valeur par défaut. - * toute autre valeur, e.g prod ou test: seuls les déploiements de ce profil - pff sont sélectionnés. - - la webapp, ou l'hôte, ou les deux sont requis. le profil et le profil pff - sont facultatifs. - - Les valeurs $5..$* sont des définitions d'attributs utilisées pour mettre à - jour les faits trouvés. Les mappings suivants sont supportés: - - = attribut = = option de toinst = - tomcat_profile --config-profile - catalina_base --catalina-base - tomcat_user --tomcat-user - tomcat_group --tomcat-group - tomcat_version --tomcat-version - manager_url --manager-url - manager_user --manager-user - manager_password --manager-password - wamap --wamap - exclude --exclude - exclude_override --replace-excludes - protect --protect - rsync_option --rsync-option - rsync_option_override --replace-rsync-options - backup --backup / --no-backup - restart --restart / --no-restart - legacy_sort --legacy-sort - pffprofile_support --ensure-pffprofile / --set-pffprofile - """ - rtoinst = args[0] if args[0:1] else None - if rtoinst is not None and (rtoinst == 'rtoinst' or rtoinst.endswith('/rtoinst')): - verb = 'rtoinst' - else: - raise ValueError("Le verbe est requis et doit valoir 'rtoinst'") - qwebapp = args[1:2] and args[1] or None - qhost = args[2:3] and args[2] or None - qprofile = args[3] if args[3:4] else 'NONE' - qpffprofile = args[4] if args[4:5] else 'ALL' - supplattrs = args[5:] - - if not qwebapp and not qhost: - raise ValueError("Il faut spécifier webapp et/ou host") - - if not qwebapp: - webapps = None - elif cwithpath(qwebapp): - qwebapp = path.abspath(qwebapp) - webapps = catalog.find_objects('webapp', expr=webapp_matcher(qwebapp)) - if not webapps: - webapps = catalog.find_objects('webapp', expr=webappname_matcher(qwebapp)) - else: - webapps = catalog.find_objects('webapp', expr=webappname_matcher(qwebapp)) - - if not qhost: - hosts = None - else: - if cwithpath(qhost): - qhost = path.basename(path.abspath(qhost)) - if withdomain(qhost): - hosts = catalog.find_objects('host', expr=host_matcher(qhost)) - if not hosts: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - else: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - - if qprofile == '': qprofile = 'ALL' - qprofiles = flattenstr([qprofile]) - if 'ALL' in qprofiles: - qprofile = None - else: - expr = [] - for qprofile in qprofiles: - if qprofile == 'NONE': - qprofile = NONE(EXISTS('profile')) - else: - qprofile = 
dict(profile=qprofile) - expr.append(qprofile) - qprofile = ANY(*expr) - - if qpffprofile == '': qpffprofile = 'ALL' - qpffprofiles = flattenstr([qpffprofile]) - if 'ALL' in qpffprofiles: - qpffprofile = None - else: - expr = [] - for qpffprofile in qpffprofiles: - if qpffprofile == 'NONE': - qpffprofile = NONE(EXISTS('pffprofile')) - else: - qpffprofile = dict(pffprofile=qpffprofile) - expr.append(qpffprofile) - qpffprofile = ANY(*expr) - - if qprofile is None and qpffprofile is None: - expr = None - elif qprofile is not None and qpffprofile is not None: - expr = ALL(qprofile, qpffprofile) - elif qprofile is not None: - expr = qprofile - elif qpffprofile is not None: - expr = qpffprofile - - # webapps et hosts sont spécifiés - if webapps is not None and hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='webapp', tsexpr=dict(oid=[webapp.oid for webapp in webapps]), - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=expr, - ) - - # Seuls les webapps sont spécifiés: chercher les hôtes - elif webapps is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='webapp', tsexpr=dict(oid=[webapp.oid for webapp in webapps]), - ttotype='host', - expr=expr, - ) - - # Seuls les hôtes sont spécifiés: chercher les webapps - elif hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='webapp', - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=expr, - ) - - # afficher la commande - if supplattrs: parser = Parser() - for fact, tsobjects, ttobjects in facts: - if supplattrs: parser.parse_attrs(supplattrs, fact) - hs = ':'.join(flattenseq([host.host for host in ttobjects])) - for webapp in tsobjects: - # construire les options de toinst. on prend les valeurs d'abord dans le - # fait puis dans l'objet webapp. 
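The loop that follows applies that precedence (fact attributes first, then the webapp object) and maps each attribute to toinst options through TOINST_ATTRS. A compressed sketch of the lookup with hypothetical attribute values; 'choice' stands in for the func=option_choice(...) entries, and the real code reads the values from the catalog.

~~~
fact_attrs   = {'restart': ['true'], 'exclude': ['*.log', '*.tmp']}   # hypothetical
webapp_attrs = {'restart': ['false'], 'backup': ['false']}            # hypothetical

DEMO_ATTRS = {
    'backup':  {'choice': {'true': '--backup', 'false': '--no-backup'}},
    'exclude': {'option': '--exclude', 'multiple': True},
    'restart': {'choice': {'true': '--restart', 'false': '--no-restart'}},
}

options = []
for name in sorted(set(fact_attrs) | set(webapp_attrs)):
    values = fact_attrs.get(name) or webapp_attrs.get(name)   # fact wins
    params = DEMO_ATTRS[name]
    if 'choice' in params:
        options.append(params['choice'][values[0]])
    elif params.get('multiple'):
        for value in values:
            options.extend([params['option'], value])

print(options)
# ['--no-backup', '--exclude', '*.log', '--exclude', '*.tmp', '--restart']
~~~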
- options = [] - names = set(fact.attrs.keys()) - names.update(webapp.attrs.keys()) - for name in names: - values = fact.get(name, None) - factvalue = True - if values is None: - values = webapp.get(name, None) - factvalue = False - if values is None: - # ne devrait pas se produire en principe - continue - if name in ('profile', 'pffprofile'): - # les attributs de sélection du profil ont été déjà été traités - # plus haut - continue - params = TOINST_ATTRS.get(name, None) - if params is None: - if factvalue: - log.warning("ignoring %s option %s=%r", fact.verb, name, values) - else: - func = params.get('func', None) - option = params.get('option', None) - if func is not None: - option = func(values[0], fact, webapp) - if option is not None: - options.extend(listof(option)) - elif option is not None: - if params.get('multiple', False): - flattensep = params.get('flattensep', None) - if flattensep is not None: - values = flattenstr(values, flattensep) - for value in values: - options.append(option) - options.append(qshell(value)) - else: - options.append(option) - options.append(qshell(values[0])) - else: - raise ValueError("missing option key for attribute %s" % name) - - for w in webapp.webapp: - # préférer si possible le chemin fourni par l'utilisateur - if withpath(qwebapp): w = qwebapp - parts = [rtoinst, '--no-deploydb', '-yh', qshell(hs), qshell(w)] - if options: - parts.append('--') - parts.extend(options) - print ' '.join(parts) diff --git a/lib/nulib/python/deploydb/uinst_module.py b/lib/nulib/python/deploydb/uinst_module.py deleted file mode 100644 index 615fe8a..0000000 --- a/lib/nulib/python/deploydb/uinst_module.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'Module', - 'module_matcher', 'modulename_matcher', -) - -import logging; log = logging.getLogger(__name__) -import os, sys -from os import path - -from .utils import * -from .expr import * -from .objects import XT, fileP, pathP, mpathP, lowerP, Object, catalog -from .parser import Parser -from .base_module import withdomain, host_matcher, hostname_matcher - -################################################################################ -# Module - -def match_module(qmodule, object): - if withpath(qmodule): # module avec chemin - return qmodule in object.get('module', ()) - else: # nom de module - return qmodule in object.get('modulename', ()) -def module_matcher(qmodule): - return lambda object: match_module(qmodule, object) - -def match_modulename(qmodule, object): - qmodule = path.basename(qmodule) - return qmodule in object.get('modulename', ()) -def modulename_matcher(qmodule): - return lambda object: match_modulename(qmodule, object) - -class Module(Object): - ATTRS = XT(Object, - values=pathP, module=mpathP, moduledir=pathP, - basedir=pathP, dirspec=fileP) - - def _resolve(self, catalog): - if self.oid == '*': return - default = catalog.get(self.otype, '*', None, False) - - modules = self.get('module', []) - basedir = self.get('basedir', None) - if basedir is not None: - modules.extend(self.resolve_basedir(basedir, dirs=True)) - dirspec = self.get('dirspec', None) - if dirspec is not None: - modules.extend(self.resolve_filespec(dirspec, dirs=True)) - - if modules: - # générer moduledir et modulename à partir de module - moduledirs = [path.dirname(module) for module in modules] - if moduledirs: moduledirs = self.moduledir = ulistof(moduledirs) - - modulenames = [path.basename(module) for module in modules] - if modulenames: modulenames 
= self.modulename = ulistof(modulenames) - - else: - # générer modules à partir de moduledir et modulename - moduledirs = self.get('moduledir', ()) - if not moduledirs and default is not None: - moduledirs = default.get('moduledir', ()) - if moduledirs: moduledirs = self.moduledir = ulistof(moduledirs) - - modulenames = self.get('modulename', ()) - if not modulenames: modulenames = [self.oid] - if modulenames: modulenames = self.modulename = ulistof(modulenames) - - if moduledirs: - modules = [] - for modulename in modulenames: - found = [] - for moduledir in moduledirs: - module = path.join(moduledir, modulename) - if path.exists(module): - found.append(module) - break - if not found: - found = [path.join(moduledirs[0], modulename)] - modules.extend(found) - else: - modules = modulenames - if modules: modules = self.module = ulistof(modules) - - if not self.values: - self.values = modules - -################################################################################ -# Actions - -def query_xuinst(*args): - """afficher la commande pour déployer avec la commande $1 le module $2 sur - l'hôte $3 dans le profil $4 - - $1 peut valoir 'ruinst' ou 'uinst' ou être un chemin vers l'un de ces deux - scripts - - $2 peut être - * un nom de module: tout les modules de ce nom sont sélectionnés - * un chemin complet: si un module avec le chemin complet est trouvé, ne - sélectinner que celui-là, sinon faire comme si on n'avait spécifié que le - nom du module - * non spécifié: tout les modules devant être déployé sur l'hôte sont - cherchés - - $3 peut être - * un nom d'hôte: tous les hôtes de ce nom sont sélectionés - * un nom d'hôte pleinement qualifié: si le nom d'hôte pleinement qualifié - est trouvé, ne sélectionner que celui-là, sinon faire comme si on n'avait - spécifié que le nom d'hôte - * non spécifié: tous les hôtes vers lequel doit être déployé le module sont - cherchés - - $4 peut valoir - * 'NONE': seuls les déploiements sans profils définis sont sélectionnés. - c'est la valeur par défaut. - * 'ALL' ou '': ne pas tenir compte du profil lors de la sélection des - modules et des hôtes - * toute autre valeur, e.g prod ou test: seuls les déploiement de ce profil - sont sélectionnés - Il est possible de spécifier plusieurs profils en les séparant par des - virgules. Par exemple, 'prod,NONE' permet de sélectionner les déploiements - sans profil ou dans le profil 'prod' - - le module, ou l'hôte, ou les deux sont requis. le profil est facultatif. - - Les valeurs $5..$* sont des définitions d'attributs utilisées pour mettre à - jour les faits trouvés. Ces définitions sont utilisés comme argument de - uinst. 
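The body that follows turns the profile argument described above into a selection expression built from the ANY/NONE/EXISTS objects of the expr module. The sketch below only mirrors that decision logic with plain Python values, so it stays self-contained; the function name and the '<no profile>' marker are illustrative.

~~~
def profile_filter(qprofile):
    """How the profile argument is interpreted: '' or 'ALL' disables the
    filter, 'NONE' keeps facts without a profile, anything else matches
    that profile; a comma-separated list accepts any of its cases."""
    if qprofile == '':
        qprofile = 'ALL'
    cases = [p for p in qprofile.split(',') if p]
    if 'ALL' in cases:
        return None                         # no filtering on profile
    return [('<no profile>' if case == 'NONE' else {'profile': case})
            for case in cases]

print(profile_filter(''))           # None
print(profile_filter('NONE'))       # ['<no profile>']
print(profile_filter('prod,NONE'))  # [{'profile': 'prod'}, '<no profile>']
~~~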
- """ - xuinst = args[0] if args[0:1] else None - if xuinst is None: - verb = None - elif xuinst == 'ruinst' or xuinst.endswith('/ruinst'): - verb = 'ruinst' - elif xuinst == 'uinst' or xuinst.endswith('/uinst'): - verb = 'uinst' - else: - verb = None - if verb is None: - raise ValueError("Le verbe est requis et doit être 'uinst' ou 'ruinst'") - qmodule = args[1:2] and args[1] or None - qhost = args[2:3] and args[2] or None - qprofile = args[3] if args[3:4] else 'NONE' - supplattrs = args[4:] - - if not qmodule and not qhost: - raise ValueError("Il faut spécifier module et/ou host") - - if not qmodule: - modules = None - elif cwithpath(qmodule): - qmodule = path.abspath(qmodule) - modules = catalog.find_objects('module', expr=module_matcher(qmodule)) - if not modules: - modules = catalog.find_objects('module', expr=modulename_matcher(qmodule)) - else: - modules = catalog.find_objects('module', expr=modulename_matcher(qmodule)) - - if not qhost: - hosts = None - else: - if cwithpath(qhost): - qhost = path.basename(path.abspath(qhost)) - if withdomain(qhost): - hosts = catalog.find_objects('host', expr=host_matcher(qhost)) - if not hosts: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - else: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - - if qprofile == '': qprofile = 'ALL' - qprofiles = flattenstr([qprofile]) - if 'ALL' in qprofiles: - qprofile = None - else: - expr = [] - for qprofile in qprofiles: - if qprofile == 'NONE': - qprofile = NONE(EXISTS('profile')) - else: - qprofile = dict(profile=qprofile) - expr.append(qprofile) - qprofile = ANY(*expr) - - # modules et hosts sont spécifiés - if modules is not None and hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='module', tsexpr=dict(oid=[module.oid for module in modules]), - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # Seuls les modules sont spécifiés: chercher les hôtes - elif modules is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='module', tsexpr=dict(oid=[module.oid for module in modules]), - ttotype='host', - expr=qprofile, - ) - - # Seuls les hôtes sont spécifiés: chercher les modules - elif hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='module', - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # afficher la commande - if supplattrs: parser = Parser() - for fact, tsobjects, ttobjects in facts: - hs = flattenseq([host.host for host in ttobjects]) - ms = flattenseq([module.module for module in tsobjects]) - if supplattrs: parser.parse_attrs(supplattrs, fact) - vars = [] - for name, values in fact.attrs.items(): - vars.append("%s=%s" % (name, qshell(':'.join(values)))) - for m in ms: - # préférer si possible le chemin fourni par l'utilisateur - if withpath(qmodule): m = qmodule - if fact.verb == 'uinst': - # chaque hôte est traité à part avec uinst:rsync - for h in hs: - parts = [xuinst, '--no-deploydb', '-yh', qshell(h), qshell(m)] - if vars: - parts.extend(['--', ' '.join(vars)]) - print ' '.join(parts) - elif fact.verb == 'ruinst': - hs = ':'.join(hs) - parts = [xuinst, '--no-deploydb', '-h', qshell(hs), qshell(m), '--', '-y'] - if vars: parts.append(' '.join(vars)) - print ' '.join(parts) diff --git a/lib/nulib/python/deploydb/utils.py b/lib/nulib/python/deploydb/utils.py deleted file mode 100644 index 1d0813d..0000000 --- a/lib/nulib/python/deploydb/utils.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 mode: python -*- 
vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Fonctions utilitaires diverses -""" - -__all__ = ( - 'isnum', - 'istrue', 'isfalse', - 'isseq', 'seqof', 'listof', 'ulistof', - 'flattenstr', 'flattenseq', - 'qshell', - 'withpath', 'cwithpath', 'find_in_path', 'relpath', -) - -from os import path - -def isnum(v): - return isinstance(v, int) or isinstance(v, long) - -def istrue(b): - s = str(b).lower() - if s in ('true', 'vrai', 'yes', 'oui', '1'): - return True - elif s in ('false', 'faux', 'no', 'non', '0'): - return False - else: - return bool(b) # laisser python décider de la valeur - -def isfalse(b): - s = str(b).lower() - if s in ('true', 'vrai', 'yes', 'oui', '1'): - return False - elif s in ('false', 'faux', 'no', 'non', '0'): - return True - else: - return not bool(b) # laisser python décider de la valeur - -def isseq(t): - """Tester si t est une séquence - """ - return isinstance(t, list) or isinstance(t, tuple) or isinstance(t, set) - -_SEQOF_UNDEF = object() -def seqof(o, noneValue=_SEQOF_UNDEF): - """Retourner un tuple à parti de o - * si o est une séquence, retourner tuple(o) - * si noneValue est défini, et que o is noneValue, retourner noneValue - * sinon, retourner le tuple (o,) - """ - if isseq(o): return tuple(o) - elif o is noneValue and noneValue is not _SEQOF_UNDEF: return noneValue - else: return (o,) - -_LISTOF_UNDEF = object() -def listof(o, noneValue=_LISTOF_UNDEF): - """Retourner une nouvelle liste à parti de o - * si o est une séquence, retourner list(o) - * si noneValue est défini, et que o is noneValue, retourner noneValue - * sinon, retourner la liste [o] - """ - if isseq(o): return list(o) - elif o is noneValue and noneValue is not _LISTOF_UNDEF: return noneValue - else: return [o] - -def ulistof(o, noneValue=_LISTOF_UNDEF): - """Retourner une nouvelle liste à parti de o - * si o est une séquence, retourner list(o) - * si noneValue est défini, et que o is noneValue, retourner noneValue - * sinon, retourner la liste [o] - - La différence avec listof(), c'est que les doublons sont supprimés de la - liste, tout en préservant l'ordre original, ce qui n'est pas le cas avec - set() - """ - if isseq(o): tmplist = list(o) - elif o is noneValue and noneValue is not _LISTOF_UNDEF: return noneValue - else: return [o] - ulist = [] - for item in tmplist: - if item not in ulist: ulist.append(item) - return ulist - -def flattenstr(src, unique=True, clean=True, sep=','): - """découper chaque élément du tableau src selon sep et les aplatir dans une - seule liste. - - Si unique==True, supprimer les doublons. 
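istrue()/isfalse() accept both French and English spellings, and ulistof() is listof() plus order-preserving deduplication. Two small self-contained re-implementations illustrate the documented behaviour; the _demo suffix marks them as illustrations, not the module's API.

~~~
def istrue_demo(value):
    """Same decision table as istrue(): French and English spellings."""
    s = str(value).lower()
    if s in ('true', 'vrai', 'yes', 'oui', '1'):
        return True
    if s in ('false', 'faux', 'no', 'non', '0'):
        return False
    return bool(value)

def ulistof_demo(value):
    """listof() plus order-preserving deduplication, as documented above."""
    items = list(value) if isinstance(value, (list, tuple, set)) else [value]
    seen = []
    for item in items:
        if item not in seen:
            seen.append(item)
    return seen

assert istrue_demo('oui') and not istrue_demo('non')
assert ulistof_demo(['a', 'b', 'a', 'c']) == ['a', 'b', 'c']
assert ulistof_demo('a') == ['a']
~~~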
- Si clean==True, supprimer les valeurs vides et les espaces périphériques - - e.g flattenstr(['a , b', 'c,']) --> ['a', 'b', 'c'] - """ - if src is None: return None - dest = [] - for items in seqof(src): - items = items.split(sep) - if clean: items = filter(None, map(lambda item: item.strip(), items)) - if unique: - for item in items: - if item not in dest: dest.append(item) - else: - dest.extend(items) - return dest - -def flattenseq(seq): - """aplatir les éléments de seq en une seule liste - - e.g flattenlist([(1, 2), (3, 4), 5]) --> [1, 2, 3, 4, 5] - """ - if seq is None: return None - if not isseq(seq): return [seq] - items = [] - for item in seq: - if isseq(item): items.extend(item) - else: items.append(item) - return items - -def qshell(values): - if isseq(values): return map(qshell, values) - elif not values: return '' - else: return "'%s'" % values.replace("'", "'\\''") - -def withpath(p): return p is not None and '/' in p -def cwithpath(p): return p is not None and ('/' in p or p in ('.', '..')) - -def find_in_path(filename, dirs, allow_path=False): - """chercher le fichier nommé filename dans les répertoires dirs - - si filename est un chemin (contient le caractère '/' ou path.sep) alors la - valeur est retournée telle quelle, sauf si allow_path=True - - retourner le chemin complet dir/filename si le fichier est trouvé, ou None - si le fichier ne figure dans aucun des répertoires - - """ - is_path = '/' in filename or path.sep in filename - if is_path and not allow_path: return filename - - for dir in dirs: - pf = path.join(dir, filename) - if path.isfile(pf): return pf - return None - -def relpath(filep, refp, abspath=True): - """exprimer filep par rapport au répertoire de refp - - si abspath==True, rendre le chemin absolu - """ - pf = path.join(dirname(refp), filep) - if abspath: pf = path.abspath(pf) - return pf diff --git a/lib/nulib/python/deploydb/woinst_module.py b/lib/nulib/python/deploydb/woinst_module.py deleted file mode 100644 index 1ee93e3..0000000 --- a/lib/nulib/python/deploydb/woinst_module.py +++ /dev/null @@ -1,332 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'Wobundle', - 'wobundle_matcher', 'wobundlename_matcher', -) - -import logging; log = logging.getLogger(__name__) -import os, sys -from os import path - -from .utils import * -from .expr import * -from .objects import XT, fileP, pathP, mpathP, lowerP, Object, catalog -from .parser import Parser -from .base_module import withdomain, host_matcher, hostname_matcher - -################################################################################ -# Wobundle - -def match_wobundle(qwobundle, object): - if withpath(qwobundle): # wobundle avec chemin - return qwobundle in object.get('wobundle', ()) - else: # nom de wobundle - return qwobundle in object.get('wobundlename', ()) -def wobundle_matcher(qwobundle): - return lambda object: match_wobundle(qwobundle, object) - -def match_wobundlename(qwobundle, object): - qwobundle = path.basename(qwobundle) - return qwobundle in object.get('wobundlename', ()) -def wobundlename_matcher(qwobundle): - return lambda object: match_wobundlename(qwobundle, object) - -class Wobundle(Object): - ATTRS = XT(Object, - values=pathP, wobundle=mpathP, wobundledir=pathP, - basedir=pathP, dirspec=fileP) - - FILESPECS = ['*.woa', '*.framework'] - - def _resolve(self, catalog): - if self.oid == '*': return - default = catalog.get(self.otype, '*', None, False) - - wobundles = self.get('wobundle', []) - basedir = self.get('basedir', 
None) - if basedir is not None: - wobundles.extend(self.resolve_basedir(basedir, dirs=True, filespec=self.FILESPECS)) - dirspec = self.get('dirspec', None) - if dirspec is not None: - wobundles.extend(self.resolve_filespec(dirspec, dirs=True)) - - if wobundles: - # générer wobundledir et wobundlename à partir de wobundle - wobundledirs = [path.dirname(wobundle) for wobundle in wobundles] - if wobundledirs: wobundledirs = self.wobundledir = ulistof(wobundledirs) - - wobundlenames = [path.basename(wobundle) for wobundle in wobundles] - if wobundlenames: wobundlenames = self.wobundlename = ulistof(wobundlenames) - - else: - # générer wobundles à partir de wobundledir et wobundlename - wobundledirs = self.get('wobundledir', ()) - if not wobundledirs and default is not None: - wobundledirs = default.get('wobundledir', ()) - if wobundledirs: wobundledirs = self.wobundledir = ulistof(wobundledirs) - - wobundlenames = self.get('wobundlename', ()) - if not wobundlenames: wobundlenames = ['%s.woa' % self.oid] - if wobundlenames: wobundlenames = self.wobundlename = ulistof(wobundlenames) - - if wobundledirs: - wobundles = [] - for wobundlename in wobundlenames: - found = [] - for wobundledir in wobundledirs: - wobundle = path.join(wobundledir, wobundlename) - if path.exists(wobundle): - found.append(wobundle) - break - if not found: - found = [path.join(wobundledirs[0], wobundlename)] - wobundles.extend(found) - else: - wobundles = wobundlenames - if wobundles: wobundles = self.wobundle = ulistof(wobundles) - - if not self.values: - self.values = wobundles - -################################################################################ -# Actions - -def option_choice(yesoption, nooption): - def func(value): - if istrue(value): return yesoption - else: return nooption - return func - -def what_choice(value): - if value in ('bundle',): - return '--bundle' - elif value in ('webres',): - return '--webres' - else: - log.warning("invalid what value %s", value) - return None - -def restart_choice(value): - if istrue(value) or value in ('restart', 'stop-start'): - return '--stop-start' - elif value in ('bounce',): - return '--bounce' - elif isfalse(value) or value in ('no-restart',): - return '--no-restart' - else: - log.warning("invalid restart value %s", value) - return None - -WOINST_ATTRS = { - 'prefix': dict( - func=option_choice('--prefix', '--no-prefix'), - ), 'what': dict( - func=what_choice, - ), 'tag': dict( - func=option_choice('--tag', '--no-tag'), - ), 'dbconfig': dict( - option='--active-dbconfig', - ), 'dbconfig_map': dict( - option='--active-dbconfig-map', - multiple=True, - flattensep=',', - ), 'restart': dict( - func=restart_choice, - ), 'exec': dict( - option='--exec', - multiple=True, - ), -} - -def query_rwoinst(*args): - """afficher la commande pour déployer avec la commande $1 le wobundle $2 sur - l'hôte $3 dans le profil $4 - - $1 doit valoir 'rwoinst' ou être un chemin vers ce script - - $2 peut être - * un nom de wobundle: tout les wobundles de ce nom sont sélectionnés - * un chemin complet: si un wobundle avec le chemin complet est trouvé, ne - sélectinner que celui-là, sinon faire comme si on n'avait spécifié que le - nom du wobundle - * non spécifié: tout les wobundles devant être déployé sur l'hôte sont - cherchés - - $3 peut être - * un nom d'hôte: tous les hôtes de ce nom sont sélectionés - * un nom d'hôte pleinement qualifié: si le nom d'hôte pleinement qualifié - est trouvé, ne sélectionner que celui-là, sinon faire comme si on n'avait - spécifié que le nom d'hôte - * non 
spécifié: tous les hôtes vers lequel doit être déployé le wobundle sont - cherchés - - $4 peut valoir - * 'NONE': seuls les déploiements sans profils définis sont sélectionnés. - c'est la valeur par défaut. - * 'ALL' ou '': ne pas tenir compte du profil lors de la sélection des - wobundles et des hôtes - * toute autre valeur, e.g prod ou test: seuls les déploiement de ce profil - sont sélectionnés - Il est possible de spécifier plusieurs profils en les séparant par des - virgules. Par exemple, 'NONE,prod' permet de sélectionner les déploiements - sans profil ou dans le profil 'prod' - - le wobundle, ou l'hôte, ou les deux sont requis. le profil est facultatif. - - Les valeurs $5..$* sont des définitions d'attributs utilisées pour mettre à - jour les faits trouvés. Les mappings suivants sont supportés: - - = attribut = = option de woinst = - what --bundle / --webres - dbconfig --active-dbconfig - dbconfig_map --active-dbconfig-map - restart --stop-start / --bounce - exec --exec - tag --tag / --no-tag - prefix --prefix / --no-prefix - destdir HTDOCSDIR= - - Les autres définitions sont utilisées comme argument de woinst, pour définir - les valeurs des préfixes. - """ - rwoinst = args[0] if args[0:1] else None - if rwoinst is not None and (rwoinst == 'rwoinst' or rwoinst.endswith('/rwoinst')): - verb = 'rwoinst' - else: - raise ValueError("Le verbe est requis et doit valoir 'rwoinst'") - qwobundle = args[1:2] and args[1] or None - qhost = args[2:3] and args[2] or None - qprofile = args[3] if args[3:4] else 'NONE' - supplattrs = args[4:] - - if not qwobundle and not qhost: - raise ValueError("Il faut spécifier wobundle et/ou host") - - if not qwobundle: - wobundles = None - elif cwithpath(qwobundle): - qwobundle = path.abspath(qwobundle) - wobundles = catalog.find_objects('wobundle', expr=wobundle_matcher(qwobundle)) - if not wobundles: - wobundles = catalog.find_objects('wobundle', expr=wobundlename_matcher(qwobundle)) - else: - wobundles = catalog.find_objects('wobundle', expr=wobundlename_matcher(qwobundle)) - - if not qhost: - hosts = None - else: - if cwithpath(qhost): - qhost = path.basename(path.abspath(qhost)) - if withdomain(qhost): - hosts = catalog.find_objects('host', expr=host_matcher(qhost)) - if not hosts: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - else: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - - if qprofile == '': qprofile = 'ALL' - qprofiles = flattenstr([qprofile]) - if 'ALL' in qprofiles: - qprofile = None - else: - expr = [] - for qprofile in qprofiles: - if qprofile == 'NONE': - qprofile = NONE(EXISTS('profile')) - else: - qprofile = dict(profile=qprofile) - expr.append(qprofile) - qprofile = ANY(*expr) - - # wobundles et hosts sont spécifiés - if wobundles is not None and hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wobundle', tsexpr=dict(oid=[wobundle.oid for wobundle in wobundles]), - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # Seuls les wobundles sont spécifiés: chercher les hôtes - elif wobundles is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wobundle', tsexpr=dict(oid=[wobundle.oid for wobundle in wobundles]), - ttotype='host', - expr=qprofile, - ) - - # Seuls les hôtes sont spécifiés: chercher les wobundles - elif hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wobundle', - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # afficher la commande - if 
supplattrs: parser = Parser() - for fact, tsobjects, ttobjects in facts: - if supplattrs: parser.parse_attrs(supplattrs, fact) - hs = ':'.join(flattenseq([host.host for host in ttobjects])) - - for wobundle in tsobjects: - # construire les options de woinst. on prend les valeurs d'abord - # dans le fait puis dans l'objet wobundle. - options = [] - vars = [] - names = set(fact.attrs.keys()) - names.update(wobundle.attrs.keys()) - for name in names: - values = fact.get(name, None) - factvalue = True - if values is None: - values = wobundle.get(name, None) - factvalue = False - if values is None: - # ne devrait pas se produire en principe - continue - if name in ('profile',): - # les attributs de sélection du profil ont été déjà été traités - # plus haut - continue - elif name == 'destdir': - name = 'HTDOCSDIR' - params = WOINST_ATTRS.get(name, None) - if params is None: - if factvalue: - # les variables spécifiques ne sont prise que dans le - # fait. - vars.append("%s=%s" % (name, qshell(':'.join(values)))) - else: - func = params.get('func', None) - option = params.get('option', None) - if func is not None: - option = func(values[0]) - if option is not None: - options.extend(listof(option)) - elif option is not None: - if params.get('multiple', False): - flattensep = params.get('flattensep', None) - if flattensep is not None: - values = flattenstr(values, flattensep) - for value in values: - options.append(option) - options.append(qshell(value)) - else: - options.append(option) - options.append(qshell(values[0])) - else: - raise ValueError("missing option key for attribute %s" % name) - - for w in wobundle.wobundle: - # préférer si possible le chemin fourni par l'utilisateur - if withpath(qwobundle): w = qwobundle - parts = [rwoinst, '--no-deploydb', '-yh', qshell(hs), qshell(w)] - if options or vars: - parts.append('--') - if options: parts.extend(options) - if vars: parts.extend(vars) - print ' '.join(parts) diff --git a/lib/nulib/python/deploydb/wyinst_module.py b/lib/nulib/python/deploydb/wyinst_module.py deleted file mode 100644 index 6adac71..0000000 --- a/lib/nulib/python/deploydb/wyinst_module.py +++ /dev/null @@ -1,223 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = ( - 'Wyapp', - 'wyapp_matcher', 'wyappname_matcher', -) - -import logging; log = logging.getLogger(__name__) -import os, sys -from os import path - -from .utils import * -from .expr import * -from .objects import XT, fileP, pathP, mpathP, lowerP, Object, catalog -from .parser import Parser -from .base_module import withdomain, host_matcher, hostname_matcher - -################################################################################ -# Wyapp - -def match_wyapp(qwyapp, object): - if withpath(qwyapp): # wyapp avec chemin - return qwyapp in object.get('wyapp', ()) - else: # nom de wyapp - return qwyapp in object.get('wyappname', ()) -def wyapp_matcher(qwyapp): - return lambda object: match_wyapp(qwyapp, object) - -def match_wyappname(qwyapp, object): - qwyapp = path.basename(qwyapp) - return qwyapp in object.get('wyappname', ()) -def wyappname_matcher(qwyapp): - return lambda object: match_wyappname(qwyapp, object) - -class Wyapp(Object): - ATTRS = XT(Object, - values=pathP, wyapp=mpathP, wyappdir=pathP, - basedir=pathP, dirspec=fileP) - - def _resolve(self, catalog): - if self.oid == '*': return - default = catalog.get(self.otype, '*', None, False) - - wyapps = self.get('wyapp', []) - basedir = self.get('basedir', None) - if basedir is not None: - 
wyapps.extend(self.resolve_basedir(basedir, dirs=True)) - dirspec = self.get('dirspec', None) - if dirspec is not None: - wyapps.extend(self.resolve_filespec(dirspec, dirs=True)) - - if wyapps: - # générer wyappdir et wyappname à partir de wyapp - wyappdirs = [path.dirname(wyapp) for wyapp in wyapps] - if wyappdirs: wyappdirs = self.wyappdir = ulistof(wyappdirs) - - wyappnames = [path.basename(wyapp) for wyapp in wyapps] - if wyappnames: wyappnames = self.wyappname = ulistof(wyappnames) - - else: - # générer wyapps à partir de wyappdir et wyappname - wyappdirs = self.get('wyappdir', ()) - if not wyappdirs and default is not None: - wyappdirs = default.get('wyappdir', ()) - if wyappdirs: wyappdirs = self.wyappdir = ulistof(wyappdirs) - - wyappnames = self.get('wyappname', ()) - if not wyappnames: wyappnames = [self.oid] - if wyappnames: wyappnames = self.wyappname = ulistof(wyappnames) - - if wyappdirs: - wyapps = [] - for wyappname in wyappnames: - found = [] - for wyappdir in wyappdirs: - wyapp = path.join(wyappdir, wyappname) - if path.exists(wyapp): - found.append(wyapp) - break - if not found: - found = [path.join(wyappdirs[0], wyappname)] - wyapps.extend(found) - else: - wyapps = wyappnames - if wyapps: wyapps = self.wyapp = ulistof(wyapps) - - if not self.values: - self.values = wyapps - -################################################################################ -# Actions - -def query_rwyinst(*args): - """afficher la commande pour déployer avec la commande $1 le wyapp $2 sur - l'hôte $3 dans le profil $4 - - $1 doit valoir 'rwyinst' ou être un chemin vers ce script - - $2 peut être - * un nom de wyapp: tout les wyapps de ce nom sont sélectionnés - * un chemin complet: si un wyapp avec le chemin complet est trouvé, ne - sélectinner que celui-là, sinon faire comme si on n'avait spécifié que le - nom du wyapp - * non spécifié: tout les wyapps devant être déployé sur l'hôte sont - cherchés - - $3 peut être - * un nom d'hôte: tous les hôtes de ce nom sont sélectionés - * un nom d'hôte pleinement qualifié: si le nom d'hôte pleinement qualifié - est trouvé, ne sélectionner que celui-là, sinon faire comme si on n'avait - spécifié que le nom d'hôte - * non spécifié: tous les hôtes vers lequel doit être déployé le wyapp sont - cherchés - - $4 peut valoir - * 'NONE': seuls les déploiements sans profils définis sont sélectionnés. - c'est la valeur par défaut. - * 'ALL' ou '': ne pas tenir compte du profil lors de la sélection des - wyapps et des hôtes - * toute autre valeur, e.g prod ou test: seuls les déploiement de ce profil - sont sélectionnés - Il est possible de spécifier plusieurs profils en les séparant par des - virgules. Par exemple, 'prod,NONE' permet de sélectionner les déploiements - sans profil ou dans le profil 'prod' - - le wyapp, ou l'hôte, ou les deux sont requis. le profil est facultatif. - - Les valeurs $5..$* sont des définitions d'attributs utilisées pour mettre à - jour les faits trouvés. Ces définitions sont utilisés comme argument de - wyinst. 
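For each matching fact the function ultimately prints one rwyinst command, with the selected hosts joined by ':' and every argument quoted through qshell(). A sketch of that assembly; the host names, wyapp path and fact attribute are hypothetical, and qshell_demo only mirrors qshell() for plain strings.

~~~
def qshell_demo(value):
    """Single-quote a value for the shell, as qshell() does for strings."""
    return "'%s'" % str(value).replace("'", "'\\''")

hosts = ['web1.example.org', 'web2.example.org']            # hypothetical
wyapp = '/srv/wyapps/MyApp'                                 # hypothetical
fact_vars = ['DESTDIR=%s' % qshell_demo('/var/www/myapp')]  # hypothetical attribute

parts = ['rwyinst', '--no-deploydb', '-h', qshell_demo(':'.join(hosts)),
         qshell_demo(wyapp), '--', '-y', ' '.join(fact_vars)]
print(' '.join(parts))
# rwyinst --no-deploydb -h 'web1.example.org:web2.example.org' '/srv/wyapps/MyApp' -- -y DESTDIR='/var/www/myapp'
~~~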
- """ - rwyinst = args[0] if args[0:1] else None - if rwyinst is not None and (rwyinst == 'rwyinst' or rwyinst.endswith('/rwyinst')): - verb = 'rwyinst' - else: - raise ValueError("Le verbe est requis et doit valoir 'rwyinst'") - qwyapp = args[1:2] and args[1] or None - qhost = args[2:3] and args[2] or None - qprofile = args[3] if args[3:4] else 'NONE' - supplattrs = args[4:] - - if not qwyapp and not qhost: - raise ValueError("Il faut spécifier wyapp et/ou host") - - if not qwyapp: - wyapps = None - elif cwithpath(qwyapp): - qwyapp = path.abspath(qwyapp) - wyapps = catalog.find_objects('wyapp', expr=wyapp_matcher(qwyapp)) - if not wyapps: - wyapps = catalog.find_objects('wyapp', expr=wyappname_matcher(qwyapp)) - else: - wyapps = catalog.find_objects('wyapp', expr=wyappname_matcher(qwyapp)) - - if not qhost: - hosts = None - else: - if cwithpath(qhost): - qhost = path.basename(path.abspath(qhost)) - if withdomain(qhost): - hosts = catalog.find_objects('host', expr=host_matcher(qhost)) - if not hosts: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - else: - hosts = catalog.find_objects('host', expr=hostname_matcher(qhost)) - - if qprofile == '': qprofile = 'ALL' - qprofiles = flattenstr([qprofile]) - if 'ALL' in qprofiles: - qprofile = None - else: - expr = [] - for qprofile in qprofiles: - if qprofile == 'NONE': - qprofile = NONE(EXISTS('profile')) - else: - qprofile = dict(profile=qprofile) - expr.append(qprofile) - qprofile = ANY(*expr) - - # wyapps et hosts sont spécifiés - if wyapps is not None and hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wyapp', tsexpr=dict(oid=[wyapp.oid for wyapp in wyapps]), - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # Seuls les wyapps sont spécifiés: chercher les hôtes - elif wyapps is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wyapp', tsexpr=dict(oid=[wyapp.oid for wyapp in wyapps]), - ttotype='host', - expr=qprofile, - ) - - # Seuls les hôtes sont spécifiés: chercher les wyapps - elif hosts is not None: - facts = catalog.find_facts( - verb=verb, - tsotype='wyapp', - ttotype='host', ttexpr=dict(oid=[host.oid for host in hosts]), - expr=qprofile, - ) - - # afficher la commande - if supplattrs: parser = Parser() - for fact, tsobjects, ttobjects in facts: - hs = flattenseq([host.host for host in ttobjects]) - ws = flattenseq([wyapp.wyapp for wyapp in tsobjects]) - if supplattrs: parser.parse_attrs(supplattrs, fact) - vars = [] - for name, values in fact.attrs.items(): - vars.append("%s=%s" % (name, qshell(':'.join(values)))) - for w in ws: - # préférer si possible le chemin fourni par l'utilisateur - if withpath(qwyapp): w = qwyapp - hs = ':'.join(hs) - parts = [xwyinst, '--no-deploydb', '-h', qshell(hs), qshell(w), '--', '-y'] - if vars: parts.append(' '.join(vars)) - print ' '.join(parts) diff --git a/lib/nulib/python/nulib/__init__.py b/lib/nulib/python/nulib/__init__.py deleted file mode 100644 index 9d853e8..0000000 --- a/lib/nulib/python/nulib/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = () - diff --git a/lib/nulib/python/nulib/args.py b/lib/nulib/python/nulib/args.py deleted file mode 100644 index b2627c5..0000000 --- a/lib/nulib/python/nulib/args.py +++ /dev/null @@ -1,610 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Gestion des arguments de la ligne de commande. 
-""" - -__all__ = ('split_args', 'join_args', 'build_options', 'get_args', - 'Options', - ) - -import sys, re -from getopt import gnu_getopt - -from .base import isstr, isbool, seqof, odict -from .output import set_verbosity, VERBOSITY_OPTS -from .input import set_interaction, INTERACTION_OPTS -from .functions import apply_args - -RE_SPACES = re.compile(r'[ \t\r\n]+') -RE_QUOTE = re.compile(r'"') -RE_QQUOTE = re.compile(r'\\"') -RE_SPACES_OR_QUOTES = re.compile(r'[ \t\r\n"]+') -RE_TOKEN = re.compile(r'[^ \t\r\n"]+') -RE_QTOKEN = re.compile(r'"((?:\\"|[^"])*)"?') - -def has_spaces(cl): - return RE_SPACES.match(cl) is not None -def skip_spaces(pcl): - mo = RE_SPACES.match(pcl[0]) - if mo is not None: - pcl[0] = pcl[0][mo.end(0):] -def get_token(pcl): - token = None - mo = RE_TOKEN.match(pcl[0]) - if mo is not None: - token, pcl[0] = pcl[0][:mo.end(0)], pcl[0][mo.end(0):] - return token -def get_qtoken(pcl): - qtoken = None - mo = RE_QTOKEN.match(pcl[0]) - if mo is not None: - qtoken, pcl[0] = mo.group(1), pcl[0][mo.end(0):] - return qtoken - -def split_args(cl): - """Lire une chaine, et la découper en plusieurs arguments, à utiliser par - exemple avec getopt() ou get_args(). - - Note: les arguments peuvent être entre quotes, mais pour le moment, seul " - est supporté, pas '. - XXX ajouter le support de ' comme quote. - - @return: la liste des arguments, ou None si cl==None - @rtype: list - """ - if cl is None: return None - - args = [] - pcl = [cl] - while pcl[0]: - if has_spaces(pcl[0]): - skip_spaces(pcl) - if not pcl[0]: - break - - arg = '' - while pcl[0] and not has_spaces(pcl[0]): - if pcl[0][:1] == '"': - arg = arg + RE_QQUOTE.sub('"', get_qtoken(pcl)) - else: - arg = arg + get_token(pcl) - - args.append(arg) - - return args - -def join_args(args): - """L'opération inverse de split_args - - @return: une chaine, ou None si args==None - """ - if args is None: return None - i = 0 - for i in range(len(args)): - arg = args[i] - if not args or RE_SPACES_OR_QUOTES.search(arg) is not None: - args[i] = '"%s"' % RE_QUOTE.sub(r'\"', arg) - return ' '.join(args) - -def build_options(argsdesc): - """Construire une liste d'options pour utilisation avec get_args ou getopt. - - A partir d'une liste de termes (option, longoptions, desc), construire et - retourner (options, longoptions), où options est un chaine et longoptions - une liste, pour utilisation avec getopt() ou get_args(). - - @return: (options, longoptions) - @rtype: tuple - """ - options = '' - longoptions = [] - if argsdesc is not None: - for argdesc in argsdesc: - if argdesc[0:1] and argdesc[0] is not None: - options += argdesc[0] - if argdesc[1:2] and argdesc[1] is not None: - longopts = argdesc[1] - if isstr(longopts): longopts = (longopts,) - longoptions.extend(filter(None, longopts)) - return options, longoptions - -# options courtes à faire traiter par set_verbosity() ou set_interaction() -M_OPTIONS = {} -# options longues à faire traiter par set_verbosity() ou set_interaction() -M_LONGOPTIONS = {} -for _opt in VERBOSITY_OPTS: - if _opt.startswith('--'): M_LONGOPTIONS[_opt] = False - elif _opt.startswith('-'): M_OPTIONS[_opt] = False -for _opt in INTERACTION_OPTS: - if _opt.startswith('--'): M_LONGOPTIONS[_opt] = False - elif _opt.startswith('-'): M_OPTIONS[_opt] = False -del _opt - -RE_OPTION = re.compile(r'.:?') -def get_args(args=None, options=None, longoptions=None, **optdescs): - """frontend pour getopt qui reconnait les options de set_verbosity et - set_interaction(), et mets à jour les niveaux automatiquement. 
- """ - if args is None: args = sys.argv[1:] - if options is None: options = '' - longoptions = seqof(longoptions, []) - - options = RE_OPTION.findall(options) - longoptions = list(longoptions) - - def in_options(opt, options=options): - """Retourner True si l'option opt est mentionnée dans options, sans - tenir compte du fait qu'elle prend ou non un argument dans options. - - Si opt n'est pas mentionné dans options, l'y rajouter. - opt doit être de la forme 'o' ou 'o:' - """ - normopt = opt[:1] - for option in options: - normoption = option[:1] - if normopt == normoption: return True - options.append(opt) - return False - def in_longoptions(longopt, longoptions=longoptions): - """Retourner True si l'option longue longopt est mentionnée dans - longoptions, sans tenir compte du fait qu'elle prend ou non un argument - dans longoptions. - - Si longopt n'est pas mentionné dans longoptions, l'y rajouter. - longopt doit être de la forme 'longopt' ou 'longopt=' - """ - if longopt[-1:] == '=': normlongopt = longopt[:-1] - else: normlongopt = longopt - for longoption in longoptions: - if longoption[-1:] == '=': normlongoption = longoption[:-1] - else: normlongoption = longoption - if normlongopt == normlongoption: return True - longoptions.append(longopt) - return False - - # déterminer quelles options seront reconnues par set_verbosity. il s'agit - # de toutes celles qui ne sont pas traitées par l'utilisateur - m_options = M_OPTIONS.copy() - m_longoptions = M_LONGOPTIONS.copy() - - for m_option in m_options.keys(): - # m_option est de la forme '-o' - if not in_options(m_option[1:]): - m_options[m_option] = True - for m_longoption in m_longoptions.keys(): - # m_longoption est de la forme '--longopt' - if not in_longoptions(m_longoption[2:]): - m_longoptions[m_longoption] = True - - # appliquer les options reconnues par set_verbosity - options = ''.join(options) - optvalues, args = gnu_getopt(args, options, longoptions) - for i in range(len(optvalues)): - opt, _ = optvalues[i] - set_verbosity_or_interaction = False - if m_longoptions.get(opt, False): # long options - set_verbosity_or_interaction = True - elif m_options.get(opt, False): # options - set_verbosity_or_interaction = True - if set_verbosity_or_interaction: - if opt in VERBOSITY_OPTS: - set_verbosity(opt) - elif opt in INTERACTION_OPTS: - set_interaction(opt) - optvalues[i] = None - - # retourner les autres options qui n'ont pas été reconnues - return filter(None, optvalues), args - -################################################################################ - -_none = object() - -RE_PREFIX = re.compile(r'^-*') -RE_SUFFIX = re.compile(r'[:=]$') -RE_STUFF = re.compile(r'[^a-zA-Z0-9]') -def opt2name(opt): - """Obtenir un nom de variable dérivé d'un nom d'option - - Les tirets de début et les caractères : et = de fin sont supprimés, et les - caractères spéciaux sont remplacés par '_' - """ - name = RE_PREFIX.sub('', opt) - name = RE_SUFFIX.sub('', name) - name = RE_STUFF.sub('_', name) - return name - -class Option(object): - """Un objet stockant la description d'une option unique - - optdef définition de l'option, e.g. 'o', 'o:', 'long-option', ou - 'long-option=' - optname nom de l'option, e.g. 'o' ou 'long-option' - short est-ce une option courte? - takes_value - cette option prend-elle un argument? - - action action associée à cette option. - name nom de la variable associée à l'option. 
- """ - - _short, short = None, property(lambda self: self._short) - _optdef, optdef = None, property(lambda self: self._optdef) - _optname, optname = None, property(lambda self: self._optname) - _takes_value, takes_value = None, property(lambda self: self._takes_value) - - def __init(self, short, optdef, optname, takes_value): - self._short = short - self._optdef = optdef - self._optname = optname - self._takes_value = takes_value - - _action, action = None, property(lambda self: self._action) - _name, name = None, property(lambda self: self._name) - - LONGOPTION_PATTERN = r'(([a-zA-Z0-9$*@!_][a-zA-Z0-9$*@!_-]*)=?)' - RE_LONGOPTION0 = re.compile(r'--%s$' % LONGOPTION_PATTERN) - RE_LONGOPTION1 = re.compile(r'%s$' % LONGOPTION_PATTERN) - OPTION_PATTERN = r'(([a-zA-Z0-9$*@!_]):?)' - RE_OPTION0 = re.compile(r'-%s$' % OPTION_PATTERN) - RE_OPTION1 = re.compile(r'%s$' % OPTION_PATTERN) - - def __init__(self, optdef): - if not optdef: raise ValueError("optdef is required") - - mo = self.RE_LONGOPTION0.match(optdef) - if mo is not None: - self.__init(False, mo.group(1), mo.group(2), mo.group(1) != mo.group(2)) - else: - mo = self.RE_OPTION0.match(optdef) - if mo is not None: - self.__init(True, mo.group(1), mo.group(2), mo.group(1) != mo.group(2)) - else: - mo = self.RE_OPTION1.match(optdef) - if mo is not None: - self.__init(True, mo.group(1), mo.group(2), mo.group(1) != mo.group(2)) - else: - mo = self.RE_LONGOPTION1.match(optdef) - if mo is not None: - self.__init(False, mo.group(1), mo.group(2), mo.group(1) != mo.group(2)) - else: - raise ValueError("Invalid option: %s" % optdef) - - def __str__(self): - prefix = self._short and '-' or '--' - return '%s%s' % (prefix, self._optname) - str = __str__ - opt = property(__str__) - - def __repr__(self): - option = self.__str__() - if self._takes_value: - if self._short: option += ':' - else: option += '=' - return '%s(%s)' % (self.__class__.__name__, repr(option)) - repr = __repr__ - - def same_optdef(self, other): - return isinstance(other, Option) and self._optdef == other.optdef - def same_optname(self, other): - return isinstance(other, Option) and \ - self._optname == other.optname and \ - self._takes_value == other.takes_value - def __eq__(self, other): - if isstr(other): - return self.__str__() == other - elif isinstance(other, Option): - return self._optdef == other.optdef - else: - return False - - def set_action(self, action, name=None): - self._action = action - self._name = name - -class Action(object): - """Une action associée à une option quand elle est rencontrée sur la ligne - de commande. - - name nom de la variable associée à l'option, None s'il faut le calculer - initial si une valeur est associée à l'option, valeur initiale de cette - option. - - Cet objet doit implémenter une méthode __call__() qui prend les arguments - (option[, value[, options]]) - La méthode doit retourner False si elle veut indiquer qu'elle n'a pas pu - mettre à jour la valeur. Tout autre valeur indique le succès. - - option est une instance de Option. value est la valeur associée à l'option, - ou _none si l'option ne prend pas d'argument. options est l'instance de - l'objet Options qui analyse les arguments. - """ - - name = property(lambda self: None) - initial = property(lambda self: None) - - def __call__(self, option=None, value=_none, options=None): - pass - -class Options(object): - """Une classe permettant de traiter des arguments en ligne de commande. 
- - Son objectif est d'offrir une solution plus flexible que les fonctions - build_options et get_args() - - Avec le constructeur et la méthode add_option(), il est possible de - construire la liste des options valides. - - Ensuite, la méthode parse() permet d'analyser la ligne de commande. Par - défaut, si une méthode n'est pas définie pour une option, ou si la méthode - définie retourne False, initialiser une variable nommée d'après l'option, en - remplaçant sa valeur (si l'option prend un argument) ou lui ajoutant 1 (si - l'option ne prend pas d'argument). - """ - - class SetValue(Action): - """Mettre à jour une variable - - value valeur qu'il faut forcer, ou _none s'il faut prendre la valeur par - défaut. Si l'option prend un argument, la valeur par défaut est la - valeur spécifiée sur la ligne de commande. Sinon, il s'agit d'une - valeur incrémentée représentant le nombre de fois que l'option - apparait. - name nom de la variable à initialiser, ou None s'il faut dériver le nom - de la variable à partir du nom de l'option. - initial valeur initiale de la variable - """ - - _value = None - _name, name = None, property(lambda self: self._name) - _initial, initial = None, property(lambda self: self._initial) - - def __init__(self, value=_none, name=None, initial=None): - self._value = value - self._name = name - self._initial = initial - - def __call__(self, option=None, value=_none, options=None): - # nom: celui qui est spécifié dans le constructeur, ou un nom dérivé du - # nom de l'option - name = self._name - if name is None: name = opt2name(option.optname) - # valeur: celle qui est spécifiée dans le constructeur, ou alors laisser - # options sans charger - if self._value is not _none: value = self._value - - # mettre à jour la valeur - options.update_value(option, value) - - class CallMethod(Action): - _method = None - - def __init__(self, method=None): - self._method = method - - def __call__(self, option=None, value=None, options=None): - return apply_args(self._method, option, value, options) - - # type d'analyse: '+' pour s'arrêter à la première non option, '' sinon - _parseopt = None - - # liste d'options courtes, instances de Option - _soptions = None - - # liste d'options longues, instances de Option - _loptions = None - - # valeurs stockées dans cet objet - _values = None - - # dictionnaire des options définies, avec chacune une instance de Option - # associée - _options = None - - ############################################################################ - # Constructeur - - def __init__(self, *optdescs): - """Initialiser l'objet avec un ensemble d'argument de la forme - - (options, longoptions, desc) - - où options est une chaine avec des lettres de la forme 'o' ou 'o:', - longoptions une liste de chaines de la forme 'option' ou 'option=', et - desc une chaine quelconque. 
- - Ce format est pour assurer la compatibilité avec la fonction - build_options() - """ - super(Options, self).__init__() - object.__setattr__(self, '_parseopt', '') - object.__setattr__(self, '_soptions', []) - object.__setattr__(self, '_loptions', []) - object.__setattr__(self, '_values', {}) - object.__setattr__(self, '_options', {}) - - self.add_option(VERBOSITY_OPTS, set_verbosity) - self.add_option(INTERACTION_OPTS, set_interaction) - for optdesc in optdescs: - options = filter(None, optdesc[:2]) - desc = optdesc[2:3] and optdesc[2] or None - self.add_option(options, None, desc) - - def __option(self, opt): - """Obtenir l'instance de Option correspondant à l'argument - """ - if isinstance(opt, Option): return opt - if not opt.startswith('-'): - if len(opt) == 1: opt = '-' + opt - else: opt = '--' + opt - option = self._options.get(opt, None) - if option is None: raise ValueError("Unknown option: %s" % opt) - return option - - def add_option(self, options=None, action=None, desc=None): - """Ajouter une option - - options peut être une chaine de l'une des formes suivantes: - - '+' arrêter l'analyse à la première non-option (configuration de gnu_getopt) - 'o', '-o', 'o:', '-o:' - option courte sans et avec argument - 'longo', '--longo', 'longo=', '--longo=' - option longue sans et avec argument - - options peut aussi être une liste de ces chaines - """ - default_name = None - for opt in filter(None, seqof(options, ())): - # traiter la configuration de l'analyse '+' - if opt.startswith('+'): - self._parseopt = '+' - opt = opt[1:] - if not opt: continue - - # nom par défaut - if default_name is None: - default_name = opt2name(opt) - - # option - option = Option(opt) - - # action - if isinstance(action, Action): - # action déjà spécifiée - pass - elif action is None: - # pas d'action: mettre à jour la variable d'après le nom de la - # première option - action = Options.SetValue(name=default_name) - elif isstr(action): - # mettre à jour la variable nommée d'après l'action - action = Options.SetValue(name=action) - elif callable(action): - # appeler l'action - action = Options.CallMethod(action) - else: - raise ValueError("Unsupported action: %s" % repr(action)) - - name = action.name - if name is None: name = default_name - - option.set_action(action, name) - - # si une précédente option est définie, il faut la remplacer - self._soptions = filter(lambda soption: not soption.same_optname(option), self._soptions) - self._loptions = filter(lambda loption: not loption.same_optname(option), self._loptions) - - # nouvelle option - if option.short: self._soptions.append(option) - else: self._loptions.append(option) - self._options[option.opt] = option - - # valeur initiale - # ne spécifier la valeur initiale que si elle n'existe pas déjà - if not self.has_value(option): - self.set_value(option, action.initial) - - return self - - ############################################################################ - # Gestion des valeurs - - def __getitem__(self, key): - return self._values[key] - def __setitem__(self, key, value): - self._values[key] = value - def __delitem__(self, key): - del self._values[key] - def get(self, key, default=None): - return self._values.get(key, default) - def __getattr__(self, key, default=_none): - try: - if default is _none: return self._values[key] - else: return self._values.get(key, default) - except KeyError: raise AttributeError(key) - def __setattr__(self, key, value): - if self._values.has_key(key): self._values[key] = value - else: return super(Options, 
self).__setattr__(key, value) - def __delattr__(self, key): - try: del self._values[key] - except KeyError: raise AttributeError(key) - - def get_value(self, option, default=_none): - """Obtenir la valeur correspondant à l'option - """ - option = self.__option(option) - return self.get(option.name, default) - def has_value(self, option): - option = self.__option(option) - return self._values.has_key(option.name) - def set_value(self, option, value): - """Spécifier la valeur correspondant à l'option - """ - option = self.__option(option) - self._values[option.name] = value - return True - - def update_value(self, option, value=_none): - option = self.__option(option) - if value is _none: - if option.takes_value: - raise ValueError("Required value") - else: - value = self.get_value(option, None) - if value is None: value = 0 - self.set_value(option, value + 1) - else: - self.set_value(option, value) - - ############################################################################ - # Exploitation - - def get_args(self, args=None): - """Analyser les arguments à la recherche des options valides. Si - args==None, prendre sys.argv[1:] - - @return (optvalues, args) - - optvalues est une liste de tuple (opt, value) correspondant à toutes les - options qui ont été analysées par gnu_getopt(). args est la liste des - arguments qui ne sont pas des options. - """ - if args is None: args = sys.argv[1:] - soptions = self._parseopt + ''.join([option.optdef for option in self._soptions]) - loptions = [option.optdef for option in self._loptions] - optvalues, args = gnu_getopt(args, soptions, loptions) - return filter(None, optvalues), args - - _parsed_names = None - - def parse(self, args=None, optvalues=None): - """Traiter les options analysées par get_args(). Si optvalues==None, - analyser les arguments de args avec get_args() d'abord. - - @return (roptvalues, args) - - optvalues est une liste de tuple (opt, value) correspondant à toutes les - options qui ont été analysées, mais n'ont pas pu être traitées par cet - objet. - - args est la liste des arguments qui ne sont pas des options. - """ - self._parsed_names = {} - if optvalues is None: optvalues, args = self.get_args(args) - roptvalues = [] - for opt, value in optvalues: - option = self.__option(opt) - self._parsed_names[option.name] = True - if not option.takes_value: value = _none - if option.action(option, value, self) == False: - roptvalues.append((opt, value)) - self.update_value(option, value) - return roptvalues, args - - def was_parsed(self, name): - """Indiquer si une option correspondant à la variable name a été - mentionnée sur la ligne de commande. 
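# Sketch (illustration only): the Options class above stores each option in a
# named value, replacing the value when the option takes an argument and
# counting occurrences when it does not. On Python 3 the standard argparse
# module provides the same pattern; a rough equivalent, not a drop-in
# replacement:
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='count', default=0)  # counted flag
parser.add_argument('-o', '--output')                              # valued option

opts = parser.parse_args(['-v', '-v', '-o', 'out.txt'])
# opts.verbose == 2 and opts.output == 'out.txt'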
- """ - if self._parsed_names is None: return False - return self._parsed_names.has_key(name) diff --git a/lib/nulib/python/nulib/base.py b/lib/nulib/python/nulib/base.py deleted file mode 100644 index 6b28271..0000000 --- a/lib/nulib/python/nulib/base.py +++ /dev/null @@ -1,505 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Fonctions de base -""" - -__all__ = ('myself', 'mydir', 'myname', - 'Undef', - 'updated', 'updatem', 'odict', 'ncdict', - 'required', 'nlistf', 'snlistf', - 'isnum', 'isflt', 'isbool', 'isbytes', 'isunicode', 'isstr', - 'isseq', 'seqof', 'listof', 'firstof', - 'all_matches', 'one_match', - 'strip_nl', 'norm_nl', - 'make_getter', 'make_setter', 'make_deleter', 'make_prop', - 'getattrs', 'setattrs', 'delattrs', 'make_delegate', - ) - -import os, sys, re -from os import path -from types import IntType, LongType, FloatType, BooleanType -from types import StringType, UnicodeType, StringTypes - -# Enlever le répertoire courant de sys.path -try: from nutools_config import CLEAN_SYSPATH -except ImportError: CLEAN_SYSPATH = True -if CLEAN_SYSPATH: - def __clean_syspath(): - cwd = os.getcwd() - sys.path = filter(lambda p: p not in ('', '.', cwd), sys.path) - __clean_syspath() - del __clean_syspath - -# emplacement du script courant -myself = path.abspath(sys.argv[0]) -mydir, myname = path.split(myself) - -# Fonctions diverses - -_undef = object() -class Undef(object): - def sa(self, value, kw, name, default=_undef): - """si value est Undef, récupérer la valeur avec le nom court name dans kw - """ - if default is _undef: default = self - if value is self and name is not None: value = kw.pop(name, self) - if value is self: value = default - return value - - def __nonzero__(self): - return False - def __len__(self): - return 0 - def __lt__(self, other): - if other: return True - else: return False - def __le__(self, other): - return True - def __eq__(self, other): - if other: return False - else: return True - def __ne__(self, other): - if other: return True - else: return False - def __gt__(self, other): - if other: return False - else: return True - def __ge__(self, other): - return True - def __repr__(self): - return 'Undef' - def __call__(self): - """créer une nouvelle instance de Undef, pratique pour un module qui veut - utiliser sa propre valeur différente de la valeur globale - """ - return self.__class__() -Undef = Undef() - -def updated(dict=None, **kw): - """Retourner une copie de dict mise à jour avec les éléments de kw - """ - if dict is None: dict = {} - else: dict = dict.copy() - dict.update(kw) - return dict - -def updatem(dict=None, *dicts): - """Mets à jour dict avec les dictionnaires dicts, et retourner dict - """ - if dict is None: dict = {} - for kw in dicts: dict.update(kw) - return dict - -class odict(dict): - """dictionnaire qui supporte aussi l'accès aux propriétés comme des attributs - """ - def __init__(self, dict=None, **kw): - super(odict, self).__init__(**updated(dict, **kw)) - - def __getattr__(self, name): - try: return self[name] - except KeyError: raise AttributeError(name) - - def __setattr__(self, name, value): - if name in self.__dict__: self.__dict__[name] = value - else: self[name] = value - - def __delattr__(self, name): - try: del self[name] - except KeyError: raise AttributeError(name) - - def copy(self): - return self.__class__(super(odict, self).copy()) - -_none = object() -class ncdict(odict): - """dictionnaire dont les clés sont insensibles à la casse - """ - def __init__(self, dict=None, **kw): - 
super(ncdict, self).__init__(**updated(dict, **kw)) - - def __getitem__(self, key): - if isstr(key): key = key.lower() - return super(ncdict, self).__getitem__(key) - - def __setitem__(self, key, value): - if isstr(key): key = key.lower() - return super(ncdict, self).__setitem__(key, value) - - def __delitem__(self, key): - if isstr(key): key = key.lower() - return super(ncdict, self).__delitem__(key) - - def __getattr__(self, key): - if isstr(key): key = key.lower() - return super(ncdict, self).__getattr__(key) - - def __setattr__(self, key, value): - if isstr(key): key = key.lower() - return super(ncdict, self).__setattr__(key, value) - - def __delattr__(self, key): - if isstr(key): key = key.lower() - return super(ncdict, self).__delattr__(key) - - def has_key(self, key): - if isstr(key): key = key.lower() - return super(ncdict, self).has_key(key) - - def get(self, key, default=_none): - if isstr(key): key = key.lower() - if default is _none: return super(ncdict, self).get(key) - else: return super(ncdict, self).get(key, default) - -def _itemprop(i, name): - def getter(self): - return self._values[i] - def setter(self, value): - validator = self.VALIDATORS.get(name, None) - if validator is not None: value = validator(value) - self._values[i] = value - return property(getter, setter) - -def _fix_module(cls): - try: cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): pass - return cls - -def required(validator, name=None): - if name is None: name = "The value" - def wrapper(value): - value = validator(value) - if value is None: raise ValueError("%s is required" % name) - else: return value - return wrapper - -def nlistf(name, *attrs, **kw): - name = str(name) - # attributs - if len(attrs) == 1 and isstr(attrs[0]): - attrs = attrs[0].replace(',', ' ').split() - attrs = tuple(map(str, attrs)) - # validateurs - validators = {} - for attr, validator in kw.iteritems(): - if attr not in attrs: - raise ValueError("Invalid validator attribute: %s" % attr) - validators[attr] = validator - - template = ["""class %(name)s(object): - __slots__ = ('_values') - ATTRS = None - VALIDATORS = None - def reset(self): - "Reinitialiser toutes les valeurs a None" - self._values = [None] * len(self.ATTRS) - return self - def replace(self, *values, **kw): - "Modifier des valeurs specifiques" - for i, attr in enumerate(self.ATTRS[:len(values)]): setattr(self, attr, values[i]) - for attr, value in kw.iteritems(): setattr(self, attr, value) - return self - def init(self, *values, **kw): - "Modifier toutes les valeurs de cet objet. Les valeurs non specifiees recoivent None." - return self.reset().replace(*values, **kw) - def __init__(self, *values, **kw): self.init(*values, **kw) - def inito(self, o): - "Modifier toutes les valeurs de cet objet en les prenant depuis les attributs de l'objet o." - for attr in self.ATTRS: setattr(self, attr, getattr(o, attr, None)) - def update(self, d): - "Mettre a jour le dictionnaire d avec les valeurs de cet objet" - for attr in self.ATTRS: d[attr] = getattr(self, attr) - def updateo(self, o): - "Mettre a jour les attributs de l'objet o avec les valeurs de cet objet." 
- for attr in self.ATTRS: setattr(o, attr, getattr(self, attr)) - def asdict(self): return dict(zip(self.ATTRS, self._values)) - def __repr__(self): return repr(self.asdict()) - def __len__(self): return len(self._values) - def __getitem__(self, key): return self._values.__getitem__(key) - def __setitem__(self, key, value): self._values.__setitem__(key, value) - def __iter__(self): return self._values.__iter__() - def __contains__(self, item): return self._values.__contains__(item)"""] - for i, attr in enumerate(attrs): - template.append(" %s = itemprop(%i, '%s')" % (attr, i, attr)) - template = "\n".join(template) % locals() - namespace = dict(itemprop=_itemprop) - try: exec template in namespace - except SyntaxError, e: raise SyntaxError('%s:\n%s' % (e.message, template)) - - cls = namespace[name] - cls.ATTRS = attrs - cls.VALIDATORS = validators - return _fix_module(cls) - -def snlistf(base, name, *attrs, **kw): - name = str(name) - # attributs - if len(attrs) == 1 and isstr(attrs[0]): - attrs = attrs[0].replace(',', ' ').split() - attrs = tuple(map(str, attrs)) - allattrs = base.ATTRS + attrs - # validateurs - validators = base.VALIDATORS.copy() - for attr, validator in kw.iteritems(): - if attr not in allattrs: - raise ValueError("Invalid validator attribute: %s" % attr) - validators[attr] = validator - - template = ["""class %(name)s(base): - __slots__ = () - ATTRS = None - VALIDATORS = None"""] - basei = len(base.ATTRS) - for i, attr in enumerate(attrs): - template.append(" %s = itemprop(%i, '%s')" % (attr, basei + i, attr)) - template = "\n".join(template) % locals() - namespace = dict(base=base, itemprop=_itemprop) - try: exec template in namespace - except SyntaxError, e: raise SyntaxError('%s:\n%s' % (e.message, template)) - - cls = namespace[name] - cls.ATTRS = allattrs - cls.VALIDATORS = validators - return _fix_module(cls) - -def isnum(i): - """Tester si i est une valeur numérique (int ou long) - """ - return type(i) in (IntType, LongType) -def isflt(f): - """Tester si f est une valeur numérique flottante (float) - """ - return type(f) is FloatType -def isbool(b): - """Tester si b est une valeur booléenne - """ - return type(b) is BooleanType -def isseq(t): - """Tester si t est une séquence (list ou tuple) - """ - return isinstance(t, list) or isinstance(t, tuple) -def seqof(seq, ifNone=Undef, nocopy=False): - """Retourner une séquence. - Si seq est une séquence, retourner une copie de l'objet si nocopy==False, - sinon l'objet lui-même. - Si seq==None: si ifNone est défini, retourner ifNone, sinon un tuple vide. - Sinon, retourner le tuple (seq,) - """ - if isseq(seq): - if nocopy: return seq - else: return seq[:] - elif seq is None: - if ifNone is Undef: return () - else: return ifNone - else: return (seq,) -def listof(seq, ifNone=Undef): - """Retourner une liste. - Si seq est une séquence, retourner la liste correspondante - Si seq==None: si ifNone est défini, retourner ifNone, sinon une liste vide. - Sinon, retourner la liste [seq] - """ - if seq is None: - if ifNone is Undef: return [] - else: return ifNone - elif isseq(seq): return list(seq) - else: return [seq] -def firstof(seq): - """Retourner le premier élément de la séquence. - Si seq n'est pas une séquence, retourner l'objet lui-même. - Si seq est une séquence vide, retourner None. 
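# Sketch (illustration only): seqof()/listof()/firstof() above normalize
# "scalar or sequence" arguments. The core contract of seqof(), restated
# compactly (the removed version also supports ifNone/nocopy parameters):
def seqof(value):
    """None -> (), list/tuple -> tuple(value), anything else -> (value,)"""
    if value is None:
        return ()
    if isinstance(value, (list, tuple)):
        return tuple(value)
    return (value,)

# seqof(None) == (), seqof([1, 2]) == (1, 2), seqof('x') == ('x',)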
- """ - if isseq(seq): return seq[0:1] and seq[0] or None - else: return seq -def isbytes(s): - """Tester si s est une valeur chaine (str) - """ - return type(s) is StringType -def isunicode(s): - """Tester si s est une valeur chaine (unicode) - """ - return type(s) is UnicodeType -def isstr(s): - """Tester si s est une valeur chaine (str ou unicode) - """ - return type(s) in StringTypes - -def all_matches(func, seq): - """Tester si tous les éléments de seq sont matchés par la fonction func. - """ - for item in seqof(seq): - if not func(item): return False - return True - -def one_match(func, seq): - """Tester si au moins un des éléments de seq est matché par la fonction - func. - """ - for item in seqof(seq): - if func(item): return True - return False - -def strip_nl(s): - """Enlever le caractère de fin de ligne de s: soit \\n, soit \\r, soit \\r\\n - """ - if s is None: return None - elif s.endswith("\r\n"): s = s[: - 2] - elif s.endswith("\n"): s = s[: - 1] - elif s.endswith("\r"): s = s[: - 1] - return s - -RE_NL = re.compile(r'(?:\r?\n|\r)') -def norm_nl(s, nl="\\n"): - """Transformer tous les caractères de fin de ligne en \\n - """ - if s is None: return None - else: return RE_NL.sub(nl, s) - -def make_getter(name): - return lambda self: getattr(self, name) -def make_setter(name, validator=None): - if validator is None: - return lambda self, value: setattr(self, name, value) - else: - return lambda self, value: setattr(self, name, validator(value)) - -def make_deleter(name): - return lambda self: delattr(self, name) - -def make_prop(name, value=None, getter=True, setter=True, deleter=False, validator=None): - """Retourne un tuple facilitant la création d'une propriété protégée par - des accesseurs. - - Voici un exemple d'usage: - - class C: - _name, name, get_name, set_name = make_prop('_name', 'Default value') - - @return: (value, property, getter_func, setter_func, deleter_func) - """ - accessors = {} - if getter in (False, None): pass - elif getter is True: getter = make_getter(name) - if getter: accessors['fget'] = getter - if setter in (False, None): pass - elif setter is True: setter = make_setter(name, validator) - elif validator is not None: - _setter = setter - setter = lambda self, value: _setter(self, validator(value)) - if setter: accessors['fset'] = setter - if deleter in (False, None): pass - elif deleter is True: deleter = make_deleter(name) - if deleter: accessors['fdel'] = deleter - result = [value, property(**accessors)] - if getter: result.append(accessors['fget']) - if setter: result.append(accessors['fset']) - if deleter: result.append(accessors['fdel']) - return tuple(result) - -def __check_names(names): - if not names: raise AttributeError("The attribute name is required") - -def getattrs(obj, names, strict=False): - u"""Soit un objet obj, et un nom de la forme "attr0.attr1....", - retourner l'objet obtenu avec l'expression obj.attr0.attr1.... - - @param strict: on requière que toute l'expression soit parcouru jusqu'à la - fin. Sinon, arrêter dès que le résultat de l'expression est None. - """ - if not names: return obj - if not isseq(names): names = names.split(".") - __check_names(names) - value = obj - for i in range(len(names)): - name = names[i] - if value is None: - if strict: - if i > 0: path = "obj." 
+ ".".join(names[:i]) - else: path = "None" - raise AttributeError("%s instance has no value '%s'" % (path, name)) - else: break - value = getattr(value, name) - return value - -def setattrs(obj, names, value): - u"""Soit un objet obj, et un nom de la forme "attr0.attr1....", - effectuer l'équivalent de l'opération: - - obj.attr0.attr1.... = value - """ - if not isseq(names): names = names.split(".") - __check_names(names) - obj = getattrs(obj, names[:-1], True) - setattr(obj, names[-1], value) - -def delattrs(obj, names): - u"""Soit un objet obj, et un nom de la forme "attr0.attr1....", - effectuer l'équivalent de l'opération: - - del obj.attr0.attr1.... - """ - if not isseq(names): names = names.split(".") - __check_names(names) - obj = getattrs(obj, names[:-1], True) - delattr(obj, names[-1]) - -def make_delegate(names, getter=True, setter=True, deleter=False): - if getter is True: - def getter(self): - return getattrs(self, names, True) - if setter is True: - def setter(self, value): - setattrs(self, names, value) - if deleter is True: - def deleter(self): - delattrs(self, names) - - accessors = {} - if getter: accessors['fget'] = getter - if setter: accessors['fset'] = setter - if deleter: accessors['fdel'] = deleter - return property(**accessors) - -def get__all__(module): - """Retourner la valeur __all__ d'un module, ou la construire si cette - valeur n'est pas définie. - - @rtype: tuple - """ - all = getattr(module, '__all__', None) - if all is None: - all = [] - for key in module.__dict__.keys(): - if key[0] != '_': all.append(key) - return tuple(all) - -def import__module__(module_name, globals, locals=None, name=None): - """Importer dans globals le module nommé module_name, en le nommant name. - - Par défaut, name est le nom de base du module. par exemple, le module - "a.b.c" sera importé sous le nom "c" - """ - module = __import__(module_name, globals, locals) - basenames = module_name.split('.') - for basename in basenames[1:]: - module = getattr(module, basename) - - if name is None: name = basenames[-1] - globals[name] = module - return [name] - -def import__all__(module_name, globals, locals=None, *names): - """Importer dans globals tous les objets du module nommé module_name - mentionnés dans names. Si names est vides, tous les objets sont importés - comme avec 'from module import *' - """ - module = __import__(module_name, globals, locals) - basenames = module_name.split('.') - for basename in basenames[1:]: - module = getattr(module, basename) - - if not names: names = get__all__(module) - __all__ = [] - for name in names: - globals[name] = getattr(module, name, None) - __all__.append(name) - return __all__ diff --git a/lib/nulib/python/nulib/config.py b/lib/nulib/python/nulib/config.py deleted file mode 100644 index 101048e..0000000 --- a/lib/nulib/python/nulib/config.py +++ /dev/null @@ -1,876 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Fonctions utilitaires pour lire des fichiers de configuration. - -Dans un fichier de configuration, l'on reconnait des lignes de la forme:: - - [comment][prefix]varname=value - -value peut être placé entre double quotes ou simple quotes. 
Elle peut s'étendre sur -plusieurs lignes si elle est mise entre quotes, ou si elle se termine par \ -""" - -__all__ = ( - 'ConfigFile', 'ShConfigFile', 'PListFile', - 'ShConfig', -) - -import os, string, re, types, shlex -from os import path - -from .base import odict, make_prop, isseq, seqof, firstof -from .uio import _s, _u -from .files import TextFile -from .formats import unicodeF - -#################### -# gestion des commentaires - -re_comments = { - 'shell': re.compile(r'[ \t]*#+'), - 'conf': re.compile(r"[ \t]*;+"), - 'C': re.compile(r'[ \t]*//+'), - 'visual basic': re.compile(r"[ \t]*'+"), - 'wincmd': re.compile(r'[ \t]*(?:r|R)(?:e|E)(?:m|M)'), - } - -def is_comment(s, type=None): - """Retourner vrai si s un commentaire (c'est à dire si la ligne commence par - un des styles de commentaires supportés) - """ - comment_types = type is None and re_comments.values() or [re_comments[type]] - for comment_type in comment_types: - if comment_type.match(s): return True - return False - -#################### -# gestion des fichiers de configuration - -_marker = object() - -class ConfigFile(TextFile): - r"""Un fichier de configuration, que l'on doit lire sous Python, et que l'on - doit partager éventuellement avec d'autres langages ou d'autres systèmes - d'exploitation. Par exemple, il peut s'agir d'un fichier de configuration - sous bash. - - Une valeur non quotée est trimée à droite et à gauche. Une valeur quotée - n'est jamais trimée. - - Une valeur quotée peut être suivie d'une valeur non quotée, et les deux sont - mergés. Mais une fois que l'on commence à parser une valeur non quotée, plus - aucun traitement n'est effectuée, ce qui fait qu'une valeur quotée ne peut - pas suivre une valeur non quotée (cf le "andme" ci-dessus). - - Ceci diffère du comportement de parseur plus évolués comme celui de bash. On - considère néanmoins que c'est une caractéristique, non un bug. XXX corriger - ce problème, ne serait-ce que pour supporter la lecture de fichiers tels que - var='value'\''with a quote' - - Tests - ===== - - >>> from StringIO import StringIO - >>> input = StringIO(r'''# comment - ... name=value - ... name2= value - ... name3 = value - ... qname="qvalue" - ... qname2=" qvalue " - ... qname3 = " qvalue " - ... qname4=" - ... multi-line - ... qvalue - ... " - ... fancy="\ - ... noNL\ - ... "foryou"andme" - ... quote='"' - ... quote2="\"" - ... quote3='\'' - ... quote4='\\' - ... quote5='\\\'' - ... quote6='\\\'remainder' - ... ''') - >>> from ulib.base.config import ConfigFile - >>> cf = ConfigFile(input) - >>> cf.get_string('name') - u'value' - >>> cf.get_string('name2') - u'value' - >>> cf.get_string('name3') - u'value' - >>> cf.get_string('qname') - u'qvalue' - >>> cf.get_string('qname2') - u' qvalue ' - >>> cf.get_string('qname3') - u' qvalue ' - >>> cf.get_string('qname4') - u'\n multi-line\n qvalue\n ' - >>> cf.get_string('fancy') - u'noNLforyouandme' - >>> cf.get_string('quote') - u'"' - >>> cf.get_string('quote2') - u'\\"' - >>> cf.get_string('quote3') - u"\\'" - >>> cf.get_string('quote4') - u'\\\\' - >>> cf.get_string('quote5') - u"\\\\\\'" - >>> cf.get_string('quote6') - u"\\\\\\'remainder" - - """ - - # valeurs lues dans le fichier de configuration - _items, items = make_prop('_items')[:2] - # valeurs par défaut - _defaults, defaults = make_prop('_defaults')[:2] - # expression régulière identifiant le préfixe des variables - _prefix, prefix = make_prop('_prefix', '')[:2] - # expression régulière identifiant pour le séparateur entre le nom de la - # variable et sa valeur. 
- _equals, equals = make_prop('_equals', r'\s*=')[:2] - # faut-il considérer les variables en commentaires? - _comment, comment = make_prop('_comment')[:2] - - ############################################################################ - # interface publique - - def __init__(self, file=None, defaults=None, - prefix=None, equals=None, comment=False, - raise_exception=True, lines=None): - """ - @param prefix une expression régulière identifiant un préfixe mentionné - avant chaque variable. par exemple, si prefix=='##@' et qu'on - cherche la variable value, alors la ligne ##@value est cherchée. - @param comment faut-il considérer les valeurs qui sont en commentaires? - Si oui, tout se passe comme si le commentaire n'existe pas. - @param defaults un ensemble de valeurs par défaut qui sont retournées si la - variable n'existe pas dans le fichier. - @param lines instance de Lines ou BLines permettant de décoder le contenu du - fichier. - """ - super(ConfigFile, self).__init__(file, raise_exception=raise_exception, lines=lines) - self._items = {} - self._defaults = defaults or {} - if prefix is not None: self._prefix = prefix - if equals is not None: self._equals = equals - self._comment = comment - - def __getitem__(self, name, default=_marker): - """Obtenir la valeur de la variable name, telle qu'elle a été lue. - Si c'est un tableau, retourner une liste. Sinon retourner une chaine. - - Si la variable n'est pas définie, retourner default. - """ - if not self._items.has_key(name): self._load_value(name) - if default is _marker: - if not self._items.has_key(name) and self._defaults.has_key(name): - return self._defaults[name] - return self._items[name] - return self._items.get(name, default) - get = __getitem__ - - def __setitem__(self, name, value): - self._items[name] = value - - def __delitem__(self, name): - del self._items[name] - - def has_key(self, name): - try: self.__getitem__(name) - except KeyError: return False - else: return True - - def get_string(self, name, default=_marker): - """Obtenir la valeur de la variable name. Si la variable est un tableau, - retourner la première valeur de ce tableau. Retourner None si le tableau - est vide. - """ - value = self.__getitem__(name, default) - if isseq(value): return firstof(value) - else: return value - - def get_lines(self, name, strip=False, default=_marker): - """Obtenir une valeur avec get_string(), et la spliter sur le caractère - de fin de ligne. Retourner la liste des lignes. - - si strip est vrai, on strip toutes les lignes puis on enlève les - lignes vides. - """ - lines = self.get_string(name, default) - if not isseq(lines): lines = re.split(r'(?:\r?)\n', lines) - if strip: lines = filter(None, map(string.strip, lines)) - return lines - - def get_paths(self, name, strip=False, default=_marker): - """Obtenir une valeur avec get_string(), la splitter sur le caractère - 'os.path.pathsep'. Retourner la liste des chemins. - - si strip est vrai, on strip toutes les valeurs puis on enlève les - valeurs vide. - """ - paths = self.get_string(name, default) - if not isseq(paths): paths = paths.split(path.pathsep) - if strip: paths = filter(None, map(string.strip, paths)) - return paths - - def get_array(self, name, default=_marker): - """Obtenir la liste des valeurs de la variable name. Si name est une - valeur scalaire, retourner une liste d'un seul élément. 
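# Sketch (illustration only): ConfigFile/ShConfigFile above read shell-style
# "name=value" files with optional quoting, line continuations and, for
# ShConfigFile, name=(a b c) arrays. For simple scalar assignments a reader
# can be built on the standard shlex module; arrays, $variable expansion and
# commented-out variables are deliberately not handled here.
import shlex

def read_sh_config(text):
    values = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith('#') or '=' not in line:
            continue
        name, _, raw = line.partition('=')
        parts = shlex.split(raw, comments=True)  # strips quotes and trailing comments
        values[name.strip()] = parts[0] if parts else ''
    return values

# read_sh_config('host="db.example.org"\nport=5432\n')
# -> {'host': 'db.example.org', 'port': '5432'}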
- """ - return list(seqof(self.__getitem__(name, default))) - - ############################################################################ - # partie privée - - RE_ANTISLASHES = re.compile(r'\\+$') - def _is_cont(self, value): - """Tester si value doit être fusionné avec la ligne suivante à cause de - la présence d'un caractère de continuation de ligne. - - Par défaut, on teste si value se termine par un nombre impair de '\\' - """ - mo = self.RE_ANTISLASHES.search(value) - if mo is None: return False - return len(mo.group()) % 2 == 1 - - def _strip_cont(self, value): - """Enlever le caractère de continuation de ligne de value. On assume que - self._is_cont(value) est vrai. - """ - return value[:-1] - - def _merge_cont(self, index, value, sep=''): - """Merger value située à la ligne index, et la ligne suivante, en les - séparant par sep. On assume que self._is_cont(value) est vrai, et que le - caractère de continuation a été enlevé avec self._strip_cont(value) - - Dans la valeur de retour, eof vaut True si la fin de fichier est - rencontrée. - - @return (index+1, merged_value, eof) - """ - if index + 1 < len(self.lines): - index += 1 - value = value + sep + self.lines[index] - eof = False - else: - eof = True - return index, value, eof - - def _unescape(self, value, quote=''): - """Traiter les séquences d'échappement dans une valeur scalaire. Si la - valeur était quotée, quote contient la valeur du caractère ("'", '"' ou - ''). Par défaut, ne rien faire. - - Cette fonction doit être surchargée en fonction du type de fichier de - configuration que l'on lit. - - La valeur quote=='' signifie que la valeur n'était pas quotée, mais il - peut quand même y avoir des séquences d'échappement à traiter. - """ - return value - - def _load_value(self, name): - """charger la valeur d'une variable depuis le fichier. - - XXX rendre le parcours plus robuste: faire attention à ne pas lire une - valeur à l'intérieur d'une autre valeur. Par exemple: - - var1="\ - var2=bad - " - var2=good - - Avec l'implémentaion actuelle, si on demande la valeur de var2, on - obtient bad. Une façon de corriger cela de parcourir *tout* le fichier, - de lire les valeurs non analysées de chaque variable au fur et à mesure, - puis de les placer en cache. ensuite, _load_value() se contenterai - d'analyser les valeurs dans le cache. - - @return None si la valeur n'est pas trouvée dans le fichier. Sinon, - retourner une valeur scalaire ou une séquence en fonction du type de la - valeur. - """ - # le groupe 1 sera testé pour voir si c'est un commentaire - re_varname = re.compile(r'(.*)%s%s%s' % (self._prefix, name, self._equals)) - re_value = re.compile(r'.*%s%s%s(.*)' % (self._prefix, name, self._equals)) - - indexes = self.grepi(re_varname) - if not indexes: return None - - # trouver d'abord la ligne appropriée - comment = '' - for index in indexes: - comment = re_varname.match(self.lines[index]).group(1) - if is_comment(comment): - # si la valeur est en commentaire, ne l'accepter que si - # self._comment est vrai - if not self._comment: - continue - # nous avons trouvé l'index de la ligne - break - else: - # aucune ligne n'a été trouvée - return - - # ensuite lire la valeur - value = re_value.match(self.lines[index]).group(1) - value = self._parse_logic(index, value) - self._items[name] = value - - def _parse_logic(self, index, value): - """Implémenter la logique d'analyse de la valeur d'une variable. - - Il faut reimplémenter cette méthode si on veut modifier le type de - valeurs supportées. 
_parse_scalar() permet d'analyser une valeur simple, - _parse_array() permet d'analyser un tableau de valeurs. - - Par défaut, on ne supporte que les valeurs scalaire. Utiliser - ShConfigFile pour supporter les tableaux. - """ - value = value.lstrip() # ignorer les espaces avant la valeur - return self._parse_scalar(index, value) - - ## valeurs scalaires simples - - RE_SPACES = re.compile(r'\s+') - def _parse_scalar(self, index, value): - remainder = value - value = '' - lstrip = None - rstrip = None - while remainder: - mo = self.RE_SPACES.match(remainder) - if mo is not None: - # ne pas supprimer les espaces entre les valeurs - remainder = remainder[mo.end():] - value += mo.group() - # XXX supporter de spécifier le type de commentaires valides dans ce - # fichier de configuration. A cet endroit, il faudrait pouvoir - # éliminer les commentaires qui sont sur la ligne. évidemment, ce ne - # serait pas forcément approprié suivant la configuration. exemple: - # REM pour un fichier cmd n'est valide qu'en début de ligne. - elif self._is_quoted(remainder): - # valeur quotée. pas de strip - if lstrip is None: lstrip = False - rstrip = False - index, next_value, remainder = self._parse_quoted(index, remainder) - value += self._unescape(next_value) - else: - # valeur non quotée. lstrip si en premier. rstrip si en dernier - if lstrip is None: lstrip = True - rstrip = True - index, next_value, remainder = self._parse_value(index, remainder) - value += self._unescape(next_value) - if lstrip: value = value.lstrip() - if rstrip: value = value.rstrip() - return value - - RE_VALUE = re.compile('[^\\s\'"]*') - def _parse_value(self, index, value, pattern=None): - """Parser une valeur simple non quotée à partir de value (qui se trouve - à la position index) et des lignes suivant index si la ligne se termine - par '\\'. - - @return index, value, remainder - """ - while self._is_cont(value): - value = self._strip_cont(value) - index, value, eof = self._merge_cont(index, value) - if eof: break - if pattern is None: pattern = self.RE_VALUE - mo = pattern.match(value) - if mo is None: - return index, '', value - else: - remainder = value[mo.end():] - value = value[:mo.end()] - return index, value, remainder - - ## valeurs scalaires quotées - - def _is_quoted(self, value): - """Tester si value est le début d'une valeur quotée. Ignorer les espaces - avant la quote. - """ - return value.lstrip()[:1] in ('"', "'") - - def _search_next_quote(self, value, re_quote): - """Chercher un match de re_quote dans value, qui ne soit pas précédé par - un nombre impair de '\\'. - """ - pos = 0 - while True: - mo = re_quote.search(value, pos) - if mo is None: return None - if self._is_cont(value[:mo.start()]): - # nombre impair de '\\', la quote est mise en échappement - pos = mo.end() - else: - return mo - - RE_QUOTE = re.compile(r'[\'"]') - def _parse_quoted(self, index, value): - """Parser une valeur quotée à partir de value (qui se trouve à la - position index) et des lignes suivant index. - - value *doit* commencer par la quote. si _is_quoted(value) est vrai, il - faut enlever les espaces éventuels au début de value avant de la passer - à cette méthode. 
- - @return index, value, remainder - """ - if self.RE_QUOTE.match(value) is None: - raise ValueError("value must start with a quote, got %s" % repr(_s(value))) - quote, value = value[:1], value[1:] - re_quote = re.compile(quote) - mo = self._search_next_quote(value, re_quote) - while mo is None: - if self._is_cont(value): - value = self._strip_cont(value) - index, value, eof = self._merge_cont(index, value) - else: - index, value, eof = self._merge_cont(index, value, self.nl) - mo = self._search_next_quote(value, re_quote) - if eof: break - if mo is None: - # valeur quotée, mais mal terminée. on fait comme si on a rien vu - return index, value, '' - else: - remainder = value[mo.end():] - value = value[:mo.start()] - return index, value, remainder - - ## tableaux - - def _is_array(self, value): - """Tester si value est le début d'un tableau. Ignorer les espaces avant - le tableau. - """ - return False - - def _parse_array(self, index, value): - """Parser un tableau à partir de value (qui se trouve à la position - index) et des lignes suivant index. - - value *doit* commencer par le tableau. si _is_array(value) est vrai, il - faut enlever les espaces éventuels au début de value avant de la passer - à cette méthode. - """ - return [] - -class ShConfigFile(ConfigFile): - r"""Un fichier de configuration qui est susceptible d'être lu aussi par bash - (ou tout autre shell sh-like). On supporte l'évaluation de variables, et - certaines séquences d'échappement pour des valeurs quotées. - - Il y a certaines limitations: lors de la lecture des valeurs des variables, - les caractères sont traduits suivant la correspondance suivante: - - \ en fin de ligne: continuer sur la ligne suivante - \" " - \\ \ - \$ $ - - La séquence \` n'est pas traduite. En effet, pour que cela aie un sens, il - faudrait que l'on traduise aussi `cmd` - - De plus, on ne supporte que les variables de la forme $var et ${var} - - Tests - ===== - - >>> from StringIO import StringIO - >>> input = StringIO(r'''# comment - ... var1=value - ... var2="value" - ... var3='value' - ... var4=(value1 "value2" 'value3') - ... var5=( - ... value1 - ... "value2\ - ... " 'value3' - ... ) - ... var6=() - ... var7=( ) - ... var8=( - ... ) - ... ''') - >>> from ulib.base.config import ShConfigFile - >>> cf = ShConfigFile(input) - >>> cf.get_string('var1') - u'value' - >>> cf.get_string('var2') - u'value' - >>> cf.get_string('var3') - u'value' - >>> cf.get_string('var4') - u'value1' - >>> cf.get_array('var4') - [u'value1', u'value2', u'value3'] - >>> cf.get_array('var5') - [u'value1', u'value2', u'value3'] - >>> [cf.get_array(name) for name in ('var6', 'var7', 'var8')] - [[], [], []] - >>> cf.get_array('var1') - [u'value'] - >>> cf.get_string('var4') - u'value1' - >>> cf.get_string('var6') is None - True - """ - - RE_VAR = re.compile(r'\$(?:\{([^}]+)\}|(\w+))') - TRANS_MAP = {r'\"': '"', r'\\': '\\', r'\$': '$'} - - def __convert(self, value): - # XXX rendre la conversion plus robuste: veiller à l'ordre ('\\\\' en - # dernier...), et ne faire la conversion que pour un nombre impaire de - # '\\'. - for s, r in self.TRANS_MAP.items(): - value = value.replace(s, r) - return value - - def _unescape(self, value, quote=''): - """convertir une valeur quotée, suivant les règles de bash. - quote peut valoir "'", '"', '' - """ - # aucune traduction entre '' - if quote == "'": return value - # sinon appliquer les règles standards. 
notamment, remplacer $var et - # ${var} par self._items["var"] ou os.environ["var"] - splited = self.RE_VAR.split(value) - value = self.__convert(splited[0]) - splited = splited[1:] - while splited: - var0 = splited[0] - var1 = splited[1] - text = splited[2] - splited = splited[3:] - var = var0 or var1 - if self.has_key(var): value = value + self.get_string(var) - else: value = value + os.environ.get(var, "") - value = value + self.__convert(text) - return value - - def _parse_logic(self, index, value): - value = value.lstrip() # ignorer les espaces avant la valeur - if self._is_array(value): return self._parse_array(index, value) - else: return self._parse_scalar(index, value) - - ## tableaux - - def _is_array(self, value): - """Tester si value est le début d'un tableau. - """ - return value.strip()[:1] == '(' - - RE_ARRAY_VALUE = re.compile('[^\\s\'")]*') - def _parse_next_scalar(self, index, value): - """Parser la prochaine valeur scalaire - XXX à faire - @return index, value, remainder - """ - remainder = value - value = '' - lstrip = None - rstrip = None - while remainder: - if self.RE_SPACES.match(remainder) is not None: - # les valeurs sont séparées par des espaces - break - # XXX cf ConfigFile._parse_scalar pour la gestion des commentaires - elif self.RE_EOA.match(remainder) is not None: - # fin de tableau - break - elif self._is_quoted(remainder): - # valeur quotée. pas de strip - if lstrip is None: lstrip = False - rstrip = False - index, next_value, remainder = self._parse_quoted(index, remainder) - value += self._unescape(next_value) - else: - # valeur non quotée. lstrip si en premier. rstrip si en dernier - if lstrip is None: lstrip = True - rstrip = True - index, next_value, remainder = self._parse_value(index, remainder, self.RE_ARRAY_VALUE) - value += self._unescape(next_value) - if lstrip: value = value.lstrip() - if rstrip: value = value.rstrip() - return index, value, remainder - - RE_SOA = re.compile(r'\(') - RE_EOA = re.compile(r'\)') - def _parse_array(self, index, value): - """Parser un tableau à partir de value (qui se trouve à la position - index) et des lignes suivant index. - - @return index, values, remaining - """ - if self.RE_SOA.match(value) is None: - raise ValueError("value must start with '(', got %s" % repr(_s(value))) - remainder = value[1:] - values = [] - eoa = False # end of array - while True: - if not remainder: - # nous n'avons pas encore rencontré la fin du tableau. Lire les - # lignes jusqu'à ce que nous trouvions ce qui est nécessaire - index, remainder, eof = self._merge_cont(index, remainder) - if eof: break - # ignorer les espaces entre les valeurs - mo = self.RE_SPACES.match(remainder) - if mo is not None: - remainder = remainder[mo.end():] - continue - # tester si on arrive à la fin du tableau - if self.RE_EOA.match(remainder) is not None: - remainder = remainder[1:] - eoa = True - break - # parser une valeur scalaire - index, next_value, remainder = self._parse_next_scalar(index, remainder) - values.append(next_value) - # ici, eoa vaut True si le tableau a été terminé proprement. - # sinon, on fait comme si on a rien vu. 
- return values - -_debug = False -def _print_debug(s): - if _debug: print s - -class PListFile(TextFile): - def readlines(self, raise_exception=True, close=True): - TextFile.readlines(self, raise_exception, close) - - self.items = None - self.list = None - self.value = None - - if self.is_valid(): - if self.lines and self.lines[0][:5] == ' 11: - month -= 12 - year += 1 - while month < 0: - month += 12 - year -= 1 - month += 1 - return year, month -MONTHDAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] -def _monthdays(year, month, offset=0): - year, month = _fix_month(year, month + offset) - if month == 2 and _isleap(year): leapday = 1 - else: leapday = 0 - return MONTHDAYS[month] + leapday -def _fix_day(year, month, day): - # on assume que month est déjà "fixé" - day -= 1 - while day > _monthdays(year, month) - 1: - day -= _monthdays(year, month) - year, month = _fix_month(year, month + 1) - while day < 0: - year, month = _fix_month(year, month - 1) - day += _monthdays(year, month) - day += 1 - return year, month, day -def _fix_date(day, month, year): - year, month = _fix_month(year, month) - year, month, day = _fix_day(year, month, day) - return day, month, year - -MONTHNAMES = [u"Janvier", u"Février", u"Mars", u"Avril", u"Mai", u"Juin", - u"Juillet", u"Août", u"Septembre", u"Octobre", u"Novembre", u"Décembre", - ] -MONTHNAMES3 = [u"Jan", u"Fév", u"Mar", u"Avr", u"Mai", u"Jun", - u"Jul", u"Aoû", u"Sep", u"Oct", u"Nov", u"Déc", - ] -MONTHNAMES1 = [u"J", u"F", u"M", u"A", u"M", u"J", - u"J", u"A", u"S", u"O", u"N", u"D", - ] - -class Date(object): - """Un wrapper pour 'datetime.date'. - - Attention! Cet objet est mutable, il ne faut donc pas l'utiliser comme clé - dans un dictionnaire. - """ - _d = None - - def __init__(self, day=None, month=None, year=None, t=None): - """Initialiser l'objet. - - Dans l'ordre, les champs considérés sont: - - day si c'est une instance de Date ou datetime.date - - t le nombre de secondes depuis l'epoch, comme retourné par - time.time(). Cette valeur est fusionnée avec les valeurs numériques - day, month, year. 
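# Sketch (illustration only): _fix_month()/_fix_day()/_fix_date() above
# normalize out-of-range month and day values by hand. The same normalization
# can be obtained from datetime arithmetic and calendar.monthrange:
from datetime import date, timedelta
import calendar

# month overflow: month 13 of 2021 is January 2022 (also works for month <= 0)
year, month = 2021, 13
year, month = year + (month - 1) // 12, (month - 1) % 12 + 1   # -> (2022, 1)

# day overflow: adding days through timedelta rolls months and years
d = date(2024, 2, 1) + timedelta(days=29)                      # -> 2024-03-01

# number of days in a month, leap years included
ndays = calendar.monthrange(2024, 2)[1]                        # -> 29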
- """ - if day is not None and not isnum(day) and month is None and year is None and t is None: - if isinstance(day, pydatetime): day = day.date() - if isinstance(day, pydate): self._d = day - elif isinstance(day, Date): self._d = day._d - if self._d is None: - if t is None: t = time() - y, m, d = localtime(t)[:3] - if year is None: year = y - if month is None: month = m - if day is None: day = d - day, month, year = _fix_date(day, month, year) - self._d = pydate(year, month, day) - - date = property(lambda self: self._d) - year = property(lambda self: self._d.year) - month = property(lambda self: self._d.month) - day = property(lambda self: self._d.day) - - # nombre de jours du mois - monthdays = property(lambda self: MONTHDAYS[self.month]) - - def weekday(self): - """Retourner le jour de la semaine, de 0 (lundi) à 6 (dimanche) - """ - return self._d.weekday() - def isoweekday(self): - """Retourner le jour de la semaine, de 1 (lundi) à 7 (dimanche) - """ - return self._d.isoweekday() - def is_today(self): - """Tester si cette date est le jour d'aujourd'hui - """ - now = self.__class__()._d - date = self._d - return now.year == date.year and now.month == date.month and now.day == date.day - - def calday(self, show_month=False, show_year=False): - """Retourner 'day' si day != 1 and not show_month and not show_year, - 'day/month' si month != 1 and not show_year, - 'day/month/year' sinon - """ - day, month, year = self.day, self.month, self.year - if day != 1 and not show_month and not show_year: return _u(day) - elif month != 1 and not show_year: return u"%i/%i" % (day, month) - else: return u"%i/%i/%i" % (day, month, year) - - def monthname(self, format=None): - """Obtenir le nom du mois. - Si format est dans (1, 't', 'tiny'), retourner le nom sur 1 lettre. - Si format est dans (3, 's', 'small'), retourner le nom sur 3 lettres. - Sinon, retourner le nom complet. - """ - if format in (1, 't', 'tiny'): names = MONTHNAMES1 - elif format in (3, 's', 'small'): names = MONTHNAMES3 - else: names = MONTHNAMES - return names[self.month - 1] - - __monthname1 = lambda self: self.monthname(1) - __monthname3 = lambda self: self.monthname(3) - FORMAT_MAP = {'%Y': '%(y)04i', '%m': '%(m)02i', '%d': '%(d)02i', - '%H': '%(H)02i', '%M': '%(M)02i', '%S': '%(S)02i', - '%1m': __monthname1, '%3m': __monthname3, '%fm': monthname, - '%C': calday, - } - def format(self, format=None): - """Formater la date pour affichage. - - Les champs valides sont %Y, %m, %d qui correspondent à la date de cet - objet, %H, %M, %S qui valent toujours 0, et %1m, %3m, %fm, %C, qui - correspondent respectivement à self.monthname(1), self.monthname(3), - self.monthname(), self.calday(). 
- """ - if format is None: format = FR_DATEF - y, m, d, H, M, S = self.year, self.month, self.day, 0, 0, 0 - for fr, to in self.FORMAT_MAP.items(): - if callable(to): to = to(self) - format = format.replace(fr, to) - return format % locals() - - def set(self, day=None, month=None, year=None): - kw = {} - for name, value in [('day', day), ('month', month), ('year', year)]: - if value is not None: kw[name] = value - self._d = self._d.replace(**kw) - return self - - def set_weekday(self, weekday=0): - if self.weekday() != weekday: - day = self.day + weekday - self.weekday() - self.set(*_fix_date(day, self.month, self.year)) - return self - - def set_isoweekday(self, isoweekday=1): - if self.isoweekday() != isoweekday: - day = self.day + isoweekday - self.isoweekday() - self.set(*_fix_date(day, self.month, self.year)) - return self - - def __repr__(self): - return '%s(%i, %i, %i)' % (self.__class__.__name__, self.year, self.month, self.day) - def __str__(self): - return '%02i/%02i/%04i' % (self.day, self.month, self.year) - def __unicode__(self): - return u'%02i/%02i/%04i' % (self.day, self.month, self.year) - - def __eq__(self, other): return self._d == self._date(other, False) - def __ne__(self, other): return self._d != self._date(other, False) - def __lt__(self, other): - if other is None: return False - else: return self._d < self._date(other) - def __le__(self, other): - if other is None: return False - else: return self._d <= self._date(other) - def __gt__(self, other): - if other is None: return True - else: return self._d > self._date(other) - def __ge__(self, other): - if other is None: return True - else: return self._d >= self._date(other) - def __cmp__(self, other): - if other is None: return 1 - else: return cmp(self._d, self._date(other)) - def __hash__(self): return hash(self._d) - - def _date(self, d, required=True): - """Retourner l'instance de datetime.date correspondant à l'objet d. - """ - if isinstance(d, pydate): return d - elif isinstance(d, pydatetime): return d.date() - elif isinstance(d, Date): return d._d - elif required: raise ValueError("Expected datetime.date or Date instance, got %s" % repr(d)) - else: return None - - def _delta(self, td): - """Retourner l'instance de datetime.timedelta correspondant à l'objet td - """ - if isinstance(td, timedelta): return td - elif isnum(td): return timedelta(td) - else: raise ValueError("Expected number or datetime.delta instance got %s" % repr(td)) - - def _new(cls, d=None, t=None): - """Constructeur. d est une instance de Date ou datetime.date. t est un - nombre de secondes depuis l'epoch. - """ - if d is not None: - if isinstance(d, pydate): return cls(d.day, d.month, d.year) - elif isinstance(d, pydatetime): return cls(d.day, d.month, d.year) - elif isinstance(d, Date): return cls(d.day, d.month, d.year) - else: raise ValueError("Expected datetime.date or Date instance, got %s" % repr(d)) - elif t is not None: return cls(t=t) - else: return cls() - _new = classmethod(_new) - - def copy(self): - """Retourner une nouvelle instance, copie de cet objet - """ - return self._new(self._d) - - def replace(self, day=None, month=None, year=None): - """Retourner une nouvelle instance avec les champs spécifiés modifiés. 
- """ - kw = {} - for name, value in [('day', day), ('month', month), ('year', year)]: - if value is not None: kw[name] = value - return self._new(self._d.replace(**kw)) - - def __add__(self, other): return self._new(self._d + self._delta(other)) - __radd__ = __add__ - def add(self, days=1): return self + days - - def __sub__(self, other): return self._new(self._d - self._delta(other)) - __rsub__ = __sub__ - def sub(self, days=1): return self - days - - def diff(self, other): - """Retourner le nombre de jours de différences entre cette date et other - """ - delta = self._d - self._date(other) - return delta.days - - def __fix_weekday(self, date): - """Si date est après jeudi, retourner le début de la semaine - suivante, sinon retourner le début de la semaine courante. - """ - date = date.copy() - if date.weekday() > 3: - date = date.set_weekday(0) - date += 7 - else: - date.set_weekday(0) - return date - - def get_monthweeks(self, complete=True, only_debut=None): - """Retourner une liste de dates (debut, fin) correspondant aux débuts - et aux fins des semaine du mois de cet objet. - - Si only_debut==True, ne retourner que la liste de valeurs debut au lieu - des tuples (debut, fin). Par défaut only_debut==complete - - Si complete==True, on ne retourne que des semaines complètes: les dates - au début et à la fin du mois sont corrigées pour inclure les jours du - mois précédent et du mois suivant s'il y a au moins 4 jours dans le mois - courant. - - Sinon, les semaines du début et de la fin du mois peuvent être tronquées - et ne contiennent que les jours du mois. - """ - if only_debut is None: only_debut = complete - - first = self.copy().set(1) - monthdays = first.monthdays - last = first + monthdays - weeks = [] - if complete: - first = self.__fix_weekday(first) - last = self.__fix_weekday(last) - debut = first - while debut < last: - fin = debut + 6 - if only_debut: weeks.append(debut) - else: weeks.append((debut, fin)) - debut = fin + 1 - else: - last -= 1 - debut = first - while debut <= last: - fin = debut.copy().set_weekday(6) - if fin > last: fin = last - if only_debut: weeks.append(debut) - else: weeks.append((debut, fin)) - debut = fin + 1 - return weeks - -def isdate(d): - """Tester si d est une instance de Date - """ - return isinstance(d, Date) -def isanydate(d): - """Tester si d est une instance de Date, datetime.date ou datetime.datetime - """ - return isinstance(d, Date) or isinstance(d, pydate) or isinstance(d, pydatetime) - -RE_DATE_FR = re.compile(r'(\d+)(?:/(\d+)(?:/(\d+))?)?$') -RE_DATE_ISO = re.compile(r'(\d+)-(\d+)-(\d+)$') -def parse_date(s): - """Parser une chaine et retourner une instance de Date - """ - mof = RE_DATE_FR.match(s) - moi = RE_DATE_ISO.match(s) - if mof is not None: - year = mof.group(3) - month = mof.group(2) - day = mof.group(1) - elif moi is not None: - year = moi.group(1) - month = moi.group(2) - day = moi.group(3) - else: - raise ValueError("Invalid date format: %s" % _s(s)) - if year is not None: year = _fix_year(int(year)) - if month is not None: month = int(month) - if day is not None: day = int(day) - return Date(day, month, year) - -def ensure_date(d): - """Retourner une instance de Date, ou None si d==None. - - d peut être une intance de datetime.date, Date ou une chaine. 
- """ - if d is None: return None - elif isinstance(d, Date): return d - elif isinstance(d, pydate): return Date._new(d) - elif isinstance(d, pydatetime): return Date._new(d) - if not isstr(d): d = _s(d) - return parse_date(d) - -def _tzname(): - tz = time_mod.timezone - if tz > 0: s = "-" - else: s = "+" - tz = abs(tz) / 60 - h = tz / 60 - m = tz % 60 - return "%s%02i%02i" % (s, h, m) - -def rfc2822(time=None, gmt=True): - """Retourner la date au format rfc 2822. - - time est une date au format de time.time() - """ - if time is None: time = time_mod.time() - if gmt: - time = gmtime(time) - tzname = "+0000" - else: - time = localtime(time) - tzname = _tzname() - return "%s %s" % (asctime(time), tzname) - -class _DateSpecConstants: - """Constantes utilisées par les classes DateSpec et ses filles - """ - - # Contrainte - C = r'(?:!(w|n)(\d+))' - C_COUNT = 2 # nombre de groupes pour l'expression régulière C - C_OP = 0 # numéro relatif du groupe pour la valeur OP - C_WD = 1 # numéro relatif du groupe pour la valeur WEEKDAY - - # Spécification - I = r'(\d+)' - I_COUNT = 1 # nombre de groupes pour l'expression régulière I - I_VALUE = 0 # numéro relatif du groupe pour la valeur VALUE - - R = r'(?:(\d+)(?:\s*-\s*(\d+))?)' # Range - R_COUNT = 2 # nombre de groupes pour l'expression régulière R - R_FROM = 0 # numéro relatif du groupe pour la valeur FROM - R_TO = 1 # numéro relatif du groupe pour la valeur TO - - AOR = r'(?:(\*)|%s)' % R # AnyOrRange - AOR_COUNT = 1 + R_COUNT # nombre de groupes pour l'expression régulière AOR - AOR_R_POS = 1 # position du premier groupe de l'expression R dans AOR - AOR_ANY = 0 - AOR_FROM = AOR_R_POS + R_FROM # numéro relatif du groupe pour la valeur FROM - AOR_TO = AOR_R_POS + R_TO # numéro relatif du groupe pour la valeur TO - - S = r'(?:\+%s|w%s|%s)(?:\s*/\s*%s(?:\s*/\s*%s)?)?' 
% (I, R, AOR, AOR, AOR) - S_COUNT = I_COUNT + R_COUNT + 3 * AOR_COUNT # nombre de groupes pour l'expression régulière S - S_I_POS = 0 # position du premier groupe de l'expression I dans S - S_R_POS = S_I_POS + I_COUNT # position du premier groupe de l'expression R dans S - S_DAOR_POS = S_R_POS + R_COUNT # position du premier groupe de l'expression DAOR dans S - S_MAOR_POS = S_DAOR_POS + AOR_COUNT # position du premier groupe de l'expression DAOR dans S - S_YAOR_POS = S_MAOR_POS + AOR_COUNT # position du premier groupe de l'expression DAOR dans S - S_OFFSET = S_I_POS + I_VALUE # numéro relatif du groupe pour la valeur OFFSET - S_WD_FROM = S_R_POS + R_FROM # numéro relatif du groupe pour la valeur FROM de WD - S_WD_TO = S_R_POS + R_TO # numéro relatif du groupe pour la valeur TO de WD - S_D_ANY = S_DAOR_POS + AOR_ANY # numéro relatif du groupe pour la valeur ANY de D - S_D_FROM = S_DAOR_POS + AOR_FROM # numéro relatif du groupe pour la valeur FROM de D - S_D_TO = S_DAOR_POS + AOR_TO # numéro relatif du groupe pour la valeur TO de D - S_M_ANY = S_MAOR_POS + AOR_ANY # numéro relatif du groupe pour la valeur ANY de M - S_M_FROM = S_MAOR_POS + AOR_FROM # numéro relatif du groupe pour la valeur FROM de M - S_M_TO = S_MAOR_POS + AOR_TO # numéro relatif du groupe pour la valeur TO de M - S_Y_ANY = S_YAOR_POS + AOR_ANY # numéro relatif du groupe pour la valeur ANY de Y - S_Y_FROM = S_YAOR_POS + AOR_FROM # numéro relatif du groupe pour la valeur FROM de Y - S_Y_TO = S_YAOR_POS + AOR_TO # numéro relatif du groupe pour la valeur TO de Y - - RE_SPEC = re.compile(r'(?:(?:%s)|(?:%s))$' % (C, S)) - # offsets des positions des groupes dans l'expression RE_SPEC - SPEC_C_POS = 0 - SPEC_S_POS = SPEC_C_POS + C_COUNT - # position des groupes dans l'expression RE_SPEC - SPEC_C_OFF = 1 + SPEC_C_POS - CONS_OP = SPEC_C_OFF + C_OP - CONS_WD = SPEC_C_OFF + C_WD - SPEC_S_OFF = 1 + SPEC_S_POS - SPEC_OFFSET = SPEC_S_OFF + S_OFFSET - SPEC_WD_FROM = SPEC_S_OFF + S_WD_FROM - SPEC_WD_TO = SPEC_S_OFF + S_WD_TO - SPEC_D_ANY = SPEC_S_OFF + S_D_ANY - SPEC_D_FROM = SPEC_S_OFF + S_D_FROM - SPEC_D_TO = SPEC_S_OFF + S_D_TO - SPEC_M_ANY = SPEC_S_OFF + S_M_ANY - SPEC_M_FROM = SPEC_S_OFF + S_M_FROM - SPEC_M_TO = SPEC_S_OFF + S_M_TO - SPEC_Y_ANY = SPEC_S_OFF + S_Y_ANY - SPEC_Y_FROM = SPEC_S_OFF + S_Y_FROM - SPEC_Y_TO = SPEC_S_OFF + S_Y_TO - - def _range(f, t=None): - f = int(f) - if t is None: t = f - else: t = int(t) - if t < f: t, f = f, t - return (f, t) - _range = staticmethod(_range) - def _isw(vs): return vs == '*' - _isw = staticmethod(_isw) - def _isr(vs): return isseq(vs) - _isr = staticmethod(_isr) - def _matches(cls, vs, v): - if cls._isw(vs): return True - elif cls._isr(vs): return v >= vs[0] and v <= vs[1] - else: raise ValueError("Invalid format: %s" % _s(vs)) - _matches = classmethod(_matches) - def _tostr(cls, vs): - if cls._isw(vs): - return "*" - elif cls._isr(vs): - if vs[0] == vs[1]: return "%i" % vs[0] - else: return "%i-%i" % vs - else: raise ValueError("Invalid format: %s" % _s(vs)) - _tostr = classmethod(_tostr) - def _check_range(cls, name, vs, min, max): - if (min is not None and (vs[0] < min or vs[1] < min)) or \ - (max is not None and (vs[0] > max or vs[1] > max)): - if min is None: min = u"-INF" - else: min = str(min) - if max is None: max = u"+INF" - else: max = str(max) - raise ValueError("%s values must be in the [%s, %s] range, got %s" % (name, min, max, cls._tostr(vs))) - _check_range = classmethod(_check_range) - def _check_value(cls, name, v, min, max): - if (min is not None and v < min) or (max is 
not None and v > max): - if min is None: min = u"-INF" - else: min = str(min) - if max is None: max = u"+INF" - else: max = str(max) - raise ValueError("%s value must be in the [%s, %s] range, got %i" % (name, min, max, v)) - _check_value = classmethod(_check_value) - -class DateSpec(_DateSpecConstants): - """Une spécification de dates de la forme D[/M[/Y]], ou une spécification - de contrainte de date de la forme !W. - - - D peut prendre l'une des formes suivantes: - - soit des jours du moins sous la forme *, DAY ou FROM-TO. - - soit des jours de la semaine sous la forme "w"WEEKDAY ou "w"FROM-TO - avec 1=Lundi, ..., 7=Dimanche - - soit une expression relative de la forme "+"DAYS, qui représente - DAYS jours après une date de référence. - - M représente des mois sous la forme *, MONTH ou FROM-TO. - - Y représente des années sous la forme *, YEAR ou FROM-TO. - - W représente des jours de la semaine sous la forme "w"WEEKDAY ou - "n"WEEKDAY avec 1=Lundi, ..., 7=Dimanche - - Exemples: - - w1-5 - Les jours de la semaine - 15/1-6 - Les 15 des mois de janvier à juin - */1 - N'importe quel jour du mois de janvier - !w4 - Spécifier que le jour DOIT être un Jeudi. - !n4 - Spécifier que le jour DOIT être le Jeudi *suivant* la date de référence - """ - - class Strategy(_DateSpecConstants): - def matches(self, date): - u"""Tester si la date correspond à cette spécification de date - """ - raise NotImplementedError - - def fix(self, date, now=None, refdate=None): - u"""Corriger date, refdate étant la date de référence - """ - raise NotImplementedError - - def is_obsolete(self, now=None): - u"""Tester si cette spécification de date est obsolète, c'est à - dire si elle désigne une date passée. - """ - raise NotImplementedError - - class ConstraintStrategy(Strategy): - """Une contrainte de date: - - "!wWEEKDAY" signifie que le jour DOIT être celui spécifié, en restant - dans la semaine en cours. - - "!nWEEKDAY" signifie que le jour DOIT être celui spécifié, mais en - prenant toujours une date future. Il est alors possible de passer sur - la semaine suivante pour arriver au bon jour. - """ - _op = None # op: w ou n - _ws = None # weekdays - - def __init__(self, mo): - self._op = mo.group(self.CONS_OP) - ws = mo.group(self.CONS_WD) - if ws is not None: self._ws = self._range(ws) - if self._ws is not None: - self._check_range("WEEKDAYS", self._ws, 0, 7) - - def __str__(self): - s = "!" 
- if self._ws is not None: - s += self._op - s += self._tostr(self._ws) - return s - - def matches(self, date): - return True - - def fix(self, date, now=None, refdate=None): - date = ensure_date(date) - expected_wd = self._ws[0] - actual_wd = date.isoweekday() - if expected_wd != actual_wd: - date += expected_wd - actual_wd - if self._op == 'n' and actual_wd > expected_wd: - date += 7 - return date - - def is_obsolete(self, now=None): - return False - - class DateStrategy(Strategy): - """Une spécification de date - """ - _offset = None # offset - _ws = None # weekdays - _ds = None # days - _ms = None # months - _ys = None # years - - def __init__(self, mo): - # offset - o = mo.group(self.SPEC_OFFSET) - if o is None: pass - else: self._offset = self._range(o)[0] - if self._offset is not None: - self._check_value("OFFSET", self._offset, 1, None) - # weekdays - wf, wt = mo.group(self.SPEC_WD_FROM), mo.group(self.SPEC_WD_TO) - if wf is None and wt is None: pass - elif wt is not None: self._ws = self._range(wf, wt) - else: self._ws = self._range(wf) - if self._ws is not None: - self._check_range("WEEKDAYS", self._ws, 0, 7) - # days - dw, df, dt = mo.group(self.SPEC_D_ANY), mo.group(self.SPEC_D_FROM), mo.group(self.SPEC_D_TO) - if dw is None and df is None and dt is None: pass - elif dw is not None: self._ds = '*' - elif dt is not None: self._ds = self._range(df, dt) - else: self._ds = self._range(df) - # months - mw, mf, mt = mo.group(self.SPEC_M_ANY), mo.group(self.SPEC_M_FROM), mo.group(self.SPEC_M_TO) - if mw is None and mf is None and mt is None: self._ms = '*' - elif mw is not None: self._ms = '*' - elif mt is not None: self._ms = self._range(mf, mt) - else: self._ms = self._range(mf) - # years - yw, yf, yt = mo.group(self.SPEC_Y_ANY), mo.group(self.SPEC_Y_FROM), mo.group(self.SPEC_Y_TO) - if yw is None and yf is None and yt is None: self._ys = '*' - elif yw is not None: self._ys = '*' - elif yt is not None: self._ys = self._range(yf, yt) - else: self._ys = self._range(yf) - if self._isr(self._ys): - self._ys = map(_fix_year, self._ys) - - def __str__(self): - s = "" - if self._offset is not None: - s += "+%i" % self._offset - if self._ws is not None: - s += "w" - s += self._tostr(self._ws) - elif self._ds is not None: - s += self._tostr(self._ds) - s += "/" - s += self._tostr(self._ms) - s += "/" - s += self._tostr(self._ys) - return s - - def fill_ranges(self, yrs = None, mrs = None, drs = None, wrs = None): - if yrs is None: yrs = [] - yrs.append(self._ys) - if mrs is None: mrs = [] - mrs.append(self._ms) - if self._ws is not None: - if wrs is None: wrs = [] - wrs.append(self._ws) - elif self._ds is not None: - if drs is None: drs = [] - drs.append(self._ds) - return yrs, mrs, drs, wrs - - def matches(self, date): - date = ensure_date(date) - # tester l'année - if not self._matches(self._ys, date.year): return False - # tester le mois - if not self._matches(self._ms, date.month): return False - # tester weekday ou day - if self._ws is not None: - if not self._matches(self._ws, date.isoweekday()): return False - elif self._ds is not None: - if not self._matches(self._ds, date.day): return False - return True - - def fix(self, date, now=None, refdate=None): - if self._offset is not None: - if now is None: now = Date() - if refdate is None: refdate = now - date = refdate + self._offset - return date - - def is_obsolete(self, now=None): - if self._offset is not None: return False - elif self._ws is not None: return False - elif self._isw(self._ds): return False - elif self._isw(self._ms): 
return False - elif self._isw(self._ys): return False - if now is None: now = Date() - y = now.year; ys = self._ys - if y > ys[0] and y > ys[1]: return True - elif y < ys[0] and y < ys[1]: return False - m = now.month; ms = self._ms - if m > ms[0] and m > ms[1]: return True - elif m < ms[0] and m < ms[1]: return False - d = now.day; ds = self._ds - if d > ds[0] and d > ds[1]: return True - return False - - _strategy = None - strategy = property(lambda self: self._strategy) - - def is_constraint_spec(self): - """Retourner True s'il s'agit d'une spécification de contrainte de date - """ - return isinstance(self._strategy, self.ConstraintStrategy) - def is_date_spec(self): - """Retourner True s'il s'agit d'une spécification de date - """ - return isinstance(self._strategy, self.DateStrategy) - - def __init__(self, spec): - mo = self.RE_SPEC.match(spec) - if mo is None: - raise ValueError("Invalid DateSpec format: %s" % _s(spec)) - - if mo.group(self.CONS_WD) is None: strategy = self.DateStrategy(mo) - else: strategy = self.ConstraintStrategy(mo) - self._strategy = strategy - - def __str__(self): - return self._strategy.__str__() - - def __repr__(self): - return "%s(\"%s\")" % (self.__class__.__name__, self) - - def matches(self, date): - return self._strategy.matches(date) - - def fix(self, date, now=None, refdate=None): - return self._strategy.fix(date, now, refdate) - - def matches_fix(self, date, now=None, refdate=None): - if self.matches(date): return True, self.fix(date, now, refdate) - else: return False, date - - def is_obsolete(self): - return self._strategy.is_obsolete() - -class DateSpecs: - """Une suite de spécifications de date, séparées par des virgules. - - Attention! l'ordre est important, car les calculs et l'évaluation des - contraintes se fait dans l'ordre des spécifications. 
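Putting the grammar described above together: `w1-5` matches any weekday, `15/1-6` matches the 15th of January through June, and a `DateSpecs` list matches if any of its comma-separated specs does (order only matters for constraints and fix-ups). A sketch, again assuming the module is importable as `nulib.dates` (Python 2 only):

```python
# Hypothetical usage of the removed DateSpec/DateSpecs classes; path assumed.
from nulib.dates import Date, DateSpec, DateSpecs

monday = Date(15, 3, 2021)                        # 2021-03-15 is a Monday
assert DateSpec('w1-5').matches(monday)           # weekday spec
assert DateSpec('15/1-6').matches(monday)         # the 15th, January..June
assert not DateSpec('15/1-6').matches(Date(15, 9, 2021))

specs = DateSpecs('w6-7,15/1-6')                  # matches if any spec matches
assert specs.matches(monday)                      # via the second spec
```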
- """ - RE_COMMA = re.compile(r'\s*,\s*') - - _specs = None - def __constraint_specs(self): - return [spec for spec in self._specs if spec.is_constraint_spec()] - def __date_specs(self): - return [spec for spec in self._specs if spec.is_date_spec()] - - def __init__(self, specs): - specs = _s(specs).strip() - self._specs = [DateSpec(spec) for spec in self.RE_COMMA.split(specs)] - - def __str__(self): - return ",".join([str(spec) for spec in self._specs]) - - def __repr__(self): - return "%s(\"%s\")" % (self.__class__.__name__, self) - - def matches(self, date): - for spec in self._specs: - if spec.matches(date): return True - return False - - def matches_fix(self, date, now=None, refdate=None): - if now is None: now = Date() - if refdate is None: refdate = now - for spec in self.__date_specs(): - if spec.matches(date): - for spec in self._specs: - date = spec.fix(date, now, refdate) - return True, date - return False, date - - _now = None - _refdate = None - _candidates = None - - def _reset_candidates(self): - self._now = None - self._refdate = None - self._candidates = None - - def _get_candidates(self, now=None, refdate=None): - if now is None: now = Date() - if refdate is None: refdate = now - if self._candidates is not None and \ - now == self._now and refdate == self._refdate: - return self._candidates - - isw = DateSpec._isw - # Enumérer les candidats de weekdays, days, months, years - yrs = None - mrs = None - drs = None - wrs = None - for spec in self.__date_specs(): - yrs, mrs, drs, wrs = spec.strategy.fill_ranges(yrs, mrs, drs, wrs) - # Calculer les dates candidates - # ...years - candidates = {} - if yrs is None: yrs = ['*'] - for ys in yrs: - if ys == '*': - candidates[now.year] = {} - candidates[now.year + 1] = {} - else: - for y in range(ys[0], ys[1] + 1): - candidates[y] = {} - years = candidates.keys() - # ...months - for year in years: - if mrs is None: mrs = ['*'] - for ms in mrs: - if ms == '*': - candidates[year][now.month] = {} - candidates[year][now.month + 1] = {} - else: - for m in range(ms[0], ms[1] + 1): - candidates[year][m] = {} - # ...weekdays or days - for year in years: - for month in candidates[year].keys(): - monthdays = range(1, _monthdays(year, month) + 1) - #candidates[year][month]['ws'] = None - candidates[year][month]['ds'] = None - if wrs is not None: - # si on précise des jours de semaine, - # inclure tous les jours du mois - #ws = [] - #for wr in wrs: - # ws.extend(range(wr[0], wr[1] + 1)) - #candidates[year][month]['ws'] = ws - candidates[year][month]['ds'] = monthdays - elif drs is not None: - ds = [] - for dr in drs: - if isw(dr): ds.extend(monthdays) - else: ds.extend(range(dr[0], dr[1] + 1)) - candidates[year][month]['ds'] = ds - else: - # ni weekdays, ni days, prendre tous les jours du mois - # à configurer ci-dessous quand on saura quel mois prendre - candidates[year][month]['ds'] = monthdays - # fin - self._now = now - self._refdate = refdate - self._candidates = candidates - return candidates - - def get_next_date(self, now=None, refdate=None): - if now is None: now = Date() - if refdate is None: refdate = now - candidates = self._get_candidates(now, refdate) - for year in [year for year in sorted(candidates.keys()) - if year >= now.year]: - for month in [month for month in sorted(candidates[year].keys()) - if Date(0, month + 1, year) >= now]: - days = [day for day in candidates[year][month]['ds'] - if Date(day, month, year) > now] - #weekdays = candidates[year][month]['ws'] - for day in days: - next = Date(day, month, year) - matches, 
next = self.matches_fix(next, now, refdate) - if matches: return next - return None - - def remove_obsoletes(self): - specs = [spec for spec in self._specs if not spec.is_obsolete()] - if len(specs) != len(self._specs): - self._specs = specs - self._reset_candidates() - return True - else: - return False diff --git a/lib/nulib/python/nulib/editor.py b/lib/nulib/python/nulib/editor.py deleted file mode 100644 index a1e0ed2..0000000 --- a/lib/nulib/python/nulib/editor.py +++ /dev/null @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Des fonctions pour éditer des fichiers. -""" - -__all__ = ('edit_file', 'edit_template') - -import os, sys - -from .base import isseq -from .env import get_editor, get_editor_options, get_editor_setrow -from .uio import EditorIO, _s -from .lines import Lines -from .args import split_args -from .tmpfiles import mktemp -from .paths import in_PATH -from .procs import spawn - -# options, setrow, setcol, colplus -EDITOR_CAPS = {'emacs': ('', '+', ':', 1), - 'xemacs': ('', '+', ':', 1), - 'gvim': ('-f', '+', '', 0), - 'vim': ('-f', '+', '', 0), - 'vi': ('', '+', '', 0), - } -def get_default_editors(): - """Retourner une liste d'éditeurs par défaut pour la plateforme en cours - """ - if sys.platform.startswith('linux'): - return ('emacs', 'xemacs', 'gvim', 'vim', 'vi') - else: - return ('xemacs', 'emacs', 'gvim', 'vim', 'vi') - -def get_editor_caps(): - """Obtenir les caractéristiques de l'éditeur configuré. - - @return: (editor, options, setrow, setcol, colplus) - """ - options = None - setrow = None - setcol = '' - colplus = 0 - - editor = get_editor() - if editor is None: - for editor in get_default_editors(): - if in_PATH(editor): break - else: - raise OSError("Unable to find a default editor. Please set UTOOLS_EDITOR.") - - if EDITOR_CAPS.has_key(editor): - options, setrow, setcol, colplus = EDITOR_CAPS[editor] - - if options is None and setrow is None: - options = split_args(get_editor_options()) - setrow = get_editor_setrow() - if options is None and setrow is None and EDITOR_CAPS.has_key(editor): - options, setrow, setcol, colplus = EDITOR_CAPS[editor] - - return editor, options, setrow or '', setcol or '', int(colplus) - -def edit_file(file, row=None, col=None): - """Lancer un éditeur pour éditer le fichier file. - - @return: le status d'exécution de l'éditeur. - """ - editor, options, setrow, setcol, colplus = get_editor_caps() - - cmd = [editor] - if options: - if isseq(options): cmd.extend(options) - else: cmd.append(options) - if setrow and row is not None: - row = int(row) - opt = '%s%i' % (setrow, row) - if setcol and col is not None: - col = int(col) - opt += '%s%i' % (setcol, col + colplus) - cmd.append(opt) - cmd.append(file) - return spawn(*cmd) - -def edit_template(template=None, strip_prefix=None, row=None, col=None, lines=None): - """Obtenir une valeur éditée dans un éditeur. - - Un fichier temporaire vide est initialisé avec le contenu de template, - puis le fichier est proposé à l'édition. - - A la sortie, toutes les lignes commençant par strip_prefix sont supprimée, - et une instance de Lines avec les lignes du fichier est retourné. 
- - @return: lines - @rtype: Lines - """ - if lines is None: - uio = EditorIO() - lines = Lines(uio=uio) - else: - uio = lines.uio - if uio is None: - uio = EditorIO() - lines.uio = uio - - ## préparer le fichier - tmpf, tmpfile = mktemp('utools') - try: - if template is not None: - template = uio.s(template) - try: tmpf.write(template) - finally: tmpf.close() - else: - tmpf.close() - - ## l'éditer - edit_file(tmpfile, row, col) - - ## traiter le résultat - lines.readlines(tmpfile) - - # enlever les préfixes - if strip_prefix is not None: - lines.filter(lambda l: not l.startswith(strip_prefix)) - - # supprimer les lignes vides au début et à la fin - while lines and not lines[0].strip(): del lines[0] - while lines and not lines[-1].strip(): del lines[-1] - - return lines - finally: - os.remove(tmpfile) diff --git a/lib/nulib/python/nulib/encdetect.py b/lib/nulib/python/nulib/encdetect.py deleted file mode 100644 index cb4a2cc..0000000 --- a/lib/nulib/python/nulib/encdetect.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Fonctions pour détecter l'encoding d'une chaine ou d'un fichier, et/ou tester -si c'est un fichier binaire. -""" - -__all__ = ('UNRECOGNIZED_ENCODING', 'UNKNOWN_ENCODING', - 'guess_encoding', 'guess_string_encoding', 'guess_stream_encoding', - 'detect_line_encoding', 'guess_line_encoding', - 'FileType', - ) - -from os import path -import re - -from .base import isstr, make_prop -from .encoding import LATIN1, UTF8, MACROMAN -from .env import get_default_encoding - -# Les tableaux suivants contiennents les caractères suivants: -# àâçèéêîïñôû - -ISO_8859_1_CHARS = [ - 0xe0, 0xe2, 0xe7, 0xe8, 0xe9, 0xea, - 0xee, 0xef, 0xf1, 0xf4, 0xfb, -] - -MAC_ROMAN_CHARS = [ - 0x88, 0x89, 0x8d, 0x8f, 0x8e, 0x90, - 0x94, 0x95, 0x96, 0x99, 0x9e, -] - -# la séquence est 0xc3 puis l'un des caractères de ce tableau -UTF_8_CHARS = [ - 0xa0, 0xa2, 0xa7, 0xa8, 0xa9, 0xaa, - 0xae, 0xaf, 0xb1, 0xb4, 0xbb, -] - -UNKNOWN_ENCODING = "Unknown" -UNRECOGNIZED_ENCODING = "Unrecognized" -def guess_string_encoding(ins, unknown=UNKNOWN_ENCODING, unrecognized=UNRECOGNIZED_ENCODING): - ascii = True - i = 0 - max = len(ins) - while i < max: - b = ord(ins[i]) - if b >= 128: ascii = False - if b == 0xc3: - b = ord(ins[i + 1]) - if b in UTF_8_CHARS: return UTF8 - elif b in ISO_8859_1_CHARS: return LATIN1 - elif b in MAC_ROMAN_CHARS: return MACROMAN - elif not ascii: return unrecognized - i = i + 1 - if unknown is None: return get_default_encoding() - else: return unknown - -def guess_stream_encoding(inf, unknown=UNKNOWN_ENCODING, unrecognized=UNRECOGNIZED_ENCODING): - close_inf = False - if isstr(inf): - inf = open(inf, 'rb') - close_inf = True - try: - return guess_string_encoding(inf.read(), unknown, unrecognized) - finally: - if close_inf: inf.close() - -def guess_encoding(ins=None, inf=None, unknown=None, unrecognized=UNRECOGNIZED_ENCODING): - if ins is not None: return guess_string_encoding(ins, unknown, unrecognized) - elif inf is not None: return guess_stream_encoding(inf, unknown, unrecognized) - else: return unknown - -RE_ENCODING = re.compile(r'(?i)\b(?:en)?coding: (\S+)\b') -def detect_line_encoding(lines, examine_lines=10): - nb_lines = len(lines) - if nb_lines < 2 * examine_lines: - examine_lines = nb_lines - - for line in lines[:examine_lines]: - mo = RE_ENCODING.search(line) - if mo is not None: return mo.group(1) - if nb_lines > examine_lines: - for line in lines[-examine_lines:]: - mo = RE_ENCODING.search(line) - if mo is not None: return 
mo.group(1) - return None - -_UNKNOWN = object() -_UNRECOGNIZED = object() -def guess_line_encoding(lines, unknown=None, unrecognized=UNRECOGNIZED_ENCODING): - for line in lines: - encoding = guess_string_encoding(line, _UNKNOWN, _UNRECOGNIZED) - if encoding is _UNKNOWN: continue - elif encoding is _UNRECOGNIZED: return unrecognized - else: return encoding - if unknown is None: return get_default_encoding() - else: return unknown - -class FileType(object): - """Un objet servant à déterminer le type d'un fichier: - - texte ou binaire - - encoding - - XXX finir cette classe, et intégrer les fonctions de paths - """ - _check_ext, check_ext = make_prop('_check_ext', True)[:2] - _check_content, check_content = make_prop('_check_content', True)[:2] - _file, file = make_prop('_file')[:2] - - def __init__(self, file): - self._file = file - - def is_binary(self): - binary = self._binary - if binary is None and self.check_ext: - binary = self.is_binary_ext(self.file) - if binary is None and self.check_context: - content = self.get_content(self.file) - binary = self.is_binary_content(content) - if binary is not None: - self._binary = binary - return binary - _binary, binary = make_prop('_binary', getter=is_binary)[:2] - - def is_binary_ext(self, file): - _, filename = path.split(file) - _, ext = path.splitext(filename) - if filename == '.DS_Store': return True - else: return ext.lower() in ( - # exécutables et fichiers objets - '.bin', '.com', '.co_', '.exe', '.ex_', '.dll', - '.pyc', '.pyd', '.pyo', '.class', - '.o', '.so', '.so.*', '.lib', '.ovl', - # archives - '.gz', '.bz2', '.tar', '.tgz', '.tbz2', - '.hqx', '.sit', '.zip', '.jar', '.rpm', '.srpm', '.deb', - # multimédia - '.bmp', '.gif', '.png', '.jpeg', '.jpg', '.tif', '.tiff', - '.xbm', '.icns', '.ico', '.avi', '.mov', '.mpg', '.swf', - '.mp3', '.snd', '.ogg', '.dat', - # documents - '.doc', '.ppt', '.xls', '.pdf', - # divers - '.bpt', '.bro', '.eps', '.fm', '.ins', '.mcp', '.objectplant', - '.ofp', '.opn','.pqg', '.prj', '.ps', '.sl', '.strings', '.wordbreak', - ) - - def get_content(self, file): - pass #XXX - - def is_binary_content(self, content): - pass #XXX diff --git a/lib/nulib/python/nulib/encoding.py b/lib/nulib/python/nulib/encoding.py deleted file mode 100644 index 5896951..0000000 --- a/lib/nulib/python/nulib/encoding.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Gestion de la langue et de l'encoding par défaut. -""" - -__all__ = ('LATIN1', 'LATIN9', 'UTF8', 'MACROMAN', - 'normalize_encoding', 'get_encoding_or_default', - ) - -import os, locale -from locale import setlocale, LC_ALL, getlocale, getdefaultlocale - -# Gestion des formes normalisées des encodings -# note: Ces formes sont déclarées normalisées par rapport à ulib, et non par -# rapport à un quelconque organisme de normalisation. 
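The detection heuristic above only looks for the accented characters `àâçèéêîïñôû` in their UTF-8, Latin-1 and MacRoman byte forms, which is enough to classify French text. A sketch of the expected results, assuming `lib/nulib/python` is on the path so the module imports as `nulib.encdetect` (Python 2 byte strings):

```python
# Hypothetical usage of the removed encdetect helpers (Python 2 only).
from nulib.encdetect import guess_string_encoding, UNKNOWN_ENCODING

assert guess_string_encoding('caf\xc3\xa9') == 'utf-8'      # 'é' encoded in UTF-8
assert guess_string_encoding('caf\xe9') == 'iso-8859-1'     # 'é' encoded in Latin-1
assert guess_string_encoding('cafe') == UNKNOWN_ENCODING    # pure ASCII: undecidable
```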
- -LATIN1 = 'iso-8859-1' -LATIN9 = 'iso-8859-15' -UTF8 = 'utf-8' -MACROMAN = 'MacRoman' - -ENCODING_MAP = {'latin-1': LATIN1, - 'latin1': LATIN1, - 'iso-8859-1': LATIN1, - 'iso-88591': LATIN1, - 'iso8859-1': LATIN1, - 'iso88591': LATIN1, - 'latin-9': LATIN9, - 'latin9': LATIN9, - 'iso-8859-15': LATIN9, - 'iso-885915': LATIN9, - 'iso8859-15': LATIN9, - 'iso885915': LATIN9, - 'utf-8': UTF8, - 'utf8': UTF8, - 'utf': UTF8, - } - -def normalize_encoding(encoding): - if encoding is None: return None - lencoding = str(encoding).lower().replace('_', '-') - return ENCODING_MAP.get(lencoding, encoding) - -DEFAULT_LANG = 'fr_FR.UTF-8' -LANG_MAP = {LATIN1: 'fr_FR', - LATIN9: 'fr_FR@euro', - UTF8: 'fr_FR.UTF-8', - } - -def get_lang_for_encoding(encoding): - return LANG_MAP.get(normalize_encoding(encoding), DEFAULT_LANG) - -def __set_locale_noexc(lang): - os.environ['LANG'] = lang - try: - setlocale(LC_ALL, '') - return True - except locale.Error: - return False - -__locale_set = False -def __set_locale(): - global __locale_set - if not __locale_set: - lang = os.environ.get('LANG', '') - if not lang or normalize_encoding(lang) == UTF8: - os.environ['LANG'] = DEFAULT_LANG - try: - setlocale(LC_ALL, '') - except locale.Error: - print "WARNING: La valeur LANG='%s' n'est pas valide ou n'a pas été reconnue par le systeme." % os.environ['LANG'] - langs = (LATIN1, LATIN9, 'C') - if os.environ['LANG'] != DEFAULT_LANG: - print "WARNING: La valeur LANG='%s' sera utilise à la place si possible." % DEFAULT_LANG - if __set_locale_noexc(DEFAULT_LANG): - langs = None - else: - print "WARNING: La valeur LANG='%s' n'a pas pu etre selectionnee." % DEFAULT_LANG - if langs is not None: - for lang in langs: - if __set_locale_noexc(lang): - print "NOTE: la valeur LANG='%s' a ete selectionnee" % lang - break - else: - print "WARNING: La valeur LANG='%s' n'a pas pu etre utilisee." % lang - - __locale_set = True - -try: from UTOOLS_CONFIG import SET_LOCALE -except ImportError: SET_LOCALE = True -if SET_LOCALE: __set_locale() - -def get_encoding_or_default(encoding=None, default_encoding=UTF8): - """Si encoding est None, essayer de déterminer l'encoding par défaut avec - getlocale(), getdefaultlocale() puis default_encoding. - """ - if encoding is None: _, encoding = getlocale() - if encoding is None: _, encoding = getdefaultlocale() - if encoding is None: encoding = default_encoding - return normalize_encoding(encoding) diff --git a/lib/nulib/python/nulib/env.py b/lib/nulib/python/nulib/env.py deleted file mode 100644 index 878dfbf..0000000 --- a/lib/nulib/python/nulib/env.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -"""Accès aux paramètres configurables de ulib/utools dans l'environnement. -""" - -__all__ = ('get_default_encoding', 'get_input_encoding', 'get_output_encoding', - 'get_editor', 'get_editor_options', 'get_editor_setrow', 'get_editor_encoding', - 'get_pager', - ) - -from os import environ - -from .encoding import get_encoding_or_default, UTF8 - -try: from nulib_config import DEFAULT_INPUT_ENCODING -except ImportError: DEFAULT_INPUT_ENCODING = UTF8 -try: from nulib_config import DEFAULT_OUTPUT_ENCODING -except ImportError: DEFAULT_OUTPUT_ENCODING = UTF8 - -def get_default_encoding(encoding=None, default_encoding=DEFAULT_OUTPUT_ENCODING): - """Si encoding est None, essayer de déterminer l'encoding par défaut avec - getlocale(), getdefaultlocale() puis DEFAULT_ENCODING. 
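`normalize_encoding` lower-cases the name, replaces `_` with `-` and maps the common aliases onto the canonical constants, leaving unknown names untouched; `get_encoding_or_default` then adds the `getlocale()`/`getdefaultlocale()` fallback chain. A small sketch (module path `nulib.encoding` assumed; Python 2 only):

```python
# Hypothetical usage of the removed encoding helpers; importing the module
# also triggers its locale setup (SET_LOCALE) as a side effect.
from nulib.encoding import normalize_encoding, LATIN1, LATIN9, UTF8

assert normalize_encoding('Latin_1') == LATIN1       # 'iso-8859-1'
assert normalize_encoding('ISO8859-15') == LATIN9    # 'iso-8859-15'
assert normalize_encoding('UTF8') == UTF8            # 'utf-8'
assert normalize_encoding('cp1252') == 'cp1252'      # unknown names pass through
```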
- """ - return get_encoding_or_default(encoding, default_encoding) - -def get_input_encoding(): - encoding = environ.get('UTOOLS_INPUT_ENCODING', None) - if encoding is None: - encoding = environ.get('UTOOLS_OUTPUT_ENCODING', None) - return get_default_encoding(encoding, DEFAULT_INPUT_ENCODING) - -def get_output_encoding(): - encoding = environ.get('UTOOLS_OUTPUT_ENCODING', None) - return get_default_encoding(encoding, DEFAULT_OUTPUT_ENCODING) - -def get_editor(): - return environ.get('UTOOLS_EDITOR', environ.get('EDITOR', None)) -def get_editor_options(): - return environ.get('UTOOLS_EDITOR_OPTIONS', None) -def get_editor_setrow(): - return environ.get('UTOOLS_EDITOR_SETROW', None) -def get_editor_encoding(): - encoding = environ.get('UTOOLS_EDITOR_ENCODING', None) - if encoding is None: - encoding = environ.get('UTOOLS_INPUT_ENCODING', None) - if encoding is None: - encoding = environ.get('UTOOLS_OUTPUT_ENCODING', None) - return get_default_encoding(encoding, DEFAULT_INPUT_ENCODING) - -def get_pager(): - return environ.get('UTOOLS_PAGER', environ.get('PAGER', None)) -def get_pager_options(): - return environ.get('UTOOLS_PAGER_OPTIONS', None) diff --git a/lib/nulib/python/nulib/ext/__init__.py b/lib/nulib/python/nulib/ext/__init__.py deleted file mode 100644 index 9d853e8..0000000 --- a/lib/nulib/python/nulib/ext/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8 - -__all__ = () - diff --git a/lib/nulib/python/nulib/ext/flup/__init__.py b/lib/nulib/python/nulib/ext/flup/__init__.py deleted file mode 100644 index 792d600..0000000 --- a/lib/nulib/python/nulib/ext/flup/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# diff --git a/lib/nulib/python/nulib/ext/flup/client/__init__.py b/lib/nulib/python/nulib/ext/flup/client/__init__.py deleted file mode 100644 index 792d600..0000000 --- a/lib/nulib/python/nulib/ext/flup/client/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# diff --git a/lib/nulib/python/nulib/ext/flup/client/fcgi_app.py b/lib/nulib/python/nulib/ext/flup/client/fcgi_app.py deleted file mode 100644 index c1c15ec..0000000 --- a/lib/nulib/python/nulib/ext/flup/client/fcgi_app.py +++ /dev/null @@ -1,461 +0,0 @@ -# Copyright (c) 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. 
-# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import select -import struct -import socket -import errno - -__all__ = ['FCGIApp'] - -# Constants from the spec. -FCGI_LISTENSOCK_FILENO = 0 - -FCGI_HEADER_LEN = 8 - -FCGI_VERSION_1 = 1 - -FCGI_BEGIN_REQUEST = 1 -FCGI_ABORT_REQUEST = 2 -FCGI_END_REQUEST = 3 -FCGI_PARAMS = 4 -FCGI_STDIN = 5 -FCGI_STDOUT = 6 -FCGI_STDERR = 7 -FCGI_DATA = 8 -FCGI_GET_VALUES = 9 -FCGI_GET_VALUES_RESULT = 10 -FCGI_UNKNOWN_TYPE = 11 -FCGI_MAXTYPE = FCGI_UNKNOWN_TYPE - -FCGI_NULL_REQUEST_ID = 0 - -FCGI_KEEP_CONN = 1 - -FCGI_RESPONDER = 1 -FCGI_AUTHORIZER = 2 -FCGI_FILTER = 3 - -FCGI_REQUEST_COMPLETE = 0 -FCGI_CANT_MPX_CONN = 1 -FCGI_OVERLOADED = 2 -FCGI_UNKNOWN_ROLE = 3 - -FCGI_MAX_CONNS = 'FCGI_MAX_CONNS' -FCGI_MAX_REQS = 'FCGI_MAX_REQS' -FCGI_MPXS_CONNS = 'FCGI_MPXS_CONNS' - -FCGI_Header = '!BBHHBx' -FCGI_BeginRequestBody = '!HB5x' -FCGI_EndRequestBody = '!LB3x' -FCGI_UnknownTypeBody = '!B7x' - -FCGI_BeginRequestBody_LEN = struct.calcsize(FCGI_BeginRequestBody) -FCGI_EndRequestBody_LEN = struct.calcsize(FCGI_EndRequestBody) -FCGI_UnknownTypeBody_LEN = struct.calcsize(FCGI_UnknownTypeBody) - -if __debug__: - import time - - # Set non-zero to write debug output to a file. - DEBUG = 0 - DEBUGLOG = '/tmp/fcgi_app.log' - - def _debug(level, msg): - if DEBUG < level: - return - - try: - f = open(DEBUGLOG, 'a') - f.write('%sfcgi: %s\n' % (time.ctime()[4:-4], msg)) - f.close() - except: - pass - -def decode_pair(s, pos=0): - """ - Decodes a name/value pair. - - The number of bytes decoded as well as the name/value pair - are returned. - """ - nameLength = ord(s[pos]) - if nameLength & 128: - nameLength = struct.unpack('!L', s[pos:pos+4])[0] & 0x7fffffff - pos += 4 - else: - pos += 1 - - valueLength = ord(s[pos]) - if valueLength & 128: - valueLength = struct.unpack('!L', s[pos:pos+4])[0] & 0x7fffffff - pos += 4 - else: - pos += 1 - - name = s[pos:pos+nameLength] - pos += nameLength - value = s[pos:pos+valueLength] - pos += valueLength - - return (pos, (name, value)) - -def encode_pair(name, value): - """ - Encodes a name/value pair. - - The encoded string is returned. - """ - nameLength = len(name) - if nameLength < 128: - s = chr(nameLength) - else: - s = struct.pack('!L', nameLength | 0x80000000L) - - valueLength = len(value) - if valueLength < 128: - s += chr(valueLength) - else: - s += struct.pack('!L', valueLength | 0x80000000L) - - return s + name + value - -class Record(object): - """ - A FastCGI Record. - - Used for encoding/decoding records. - """ - def __init__(self, type=FCGI_UNKNOWN_TYPE, requestId=FCGI_NULL_REQUEST_ID): - self.version = FCGI_VERSION_1 - self.type = type - self.requestId = requestId - self.contentLength = 0 - self.paddingLength = 0 - self.contentData = '' - - def _recvall(sock, length): - """ - Attempts to receive length bytes from a socket, blocking if necessary. - (Socket may be blocking or non-blocking.) 
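The name/value encoding above is the standard FastCGI scheme: each length is a single byte, or four bytes with the high bit set once it exceeds 127. A quick round-trip check (hypothetical usage of the bundled client as it was before this removal; Python 2 byte strings):

```python
# Hypothetical usage of the removed bundled flup client (Python 2 only).
from nulib.ext.flup.client.fcgi_app import encode_pair, decode_pair

s = encode_pair('SCRIPT_NAME', '/app') + encode_pair('QUERY_STRING', 'x' * 200)
pos, (name, value) = decode_pair(s)          # both lengths fit in one byte
assert (name, value) == ('SCRIPT_NAME', '/app')
pos, (name, value) = decode_pair(s, pos)     # 200 > 127: 4-byte length field
assert name == 'QUERY_STRING' and len(value) == 200
```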
- """ - dataList = [] - recvLen = 0 - while length: - try: - data = sock.recv(length) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([sock], [], []) - continue - else: - raise - if not data: # EOF - break - dataList.append(data) - dataLen = len(data) - recvLen += dataLen - length -= dataLen - return ''.join(dataList), recvLen - _recvall = staticmethod(_recvall) - - def read(self, sock): - """Read and decode a Record from a socket.""" - try: - header, length = self._recvall(sock, FCGI_HEADER_LEN) - except: - raise EOFError - - if length < FCGI_HEADER_LEN: - raise EOFError - - self.version, self.type, self.requestId, self.contentLength, \ - self.paddingLength = struct.unpack(FCGI_Header, header) - - if __debug__: _debug(9, 'read: fd = %d, type = %d, requestId = %d, ' - 'contentLength = %d' % - (sock.fileno(), self.type, self.requestId, - self.contentLength)) - - if self.contentLength: - try: - self.contentData, length = self._recvall(sock, - self.contentLength) - except: - raise EOFError - - if length < self.contentLength: - raise EOFError - - if self.paddingLength: - try: - self._recvall(sock, self.paddingLength) - except: - raise EOFError - - def _sendall(sock, data): - """ - Writes data to a socket and does not return until all the data is sent. - """ - length = len(data) - while length: - try: - sent = sock.send(data) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([], [sock], []) - continue - else: - raise - data = data[sent:] - length -= sent - _sendall = staticmethod(_sendall) - - def write(self, sock): - """Encode and write a Record to a socket.""" - self.paddingLength = -self.contentLength & 7 - - if __debug__: _debug(9, 'write: fd = %d, type = %d, requestId = %d, ' - 'contentLength = %d' % - (sock.fileno(), self.type, self.requestId, - self.contentLength)) - - header = struct.pack(FCGI_Header, self.version, self.type, - self.requestId, self.contentLength, - self.paddingLength) - self._sendall(sock, header) - if self.contentLength: - self._sendall(sock, self.contentData) - if self.paddingLength: - self._sendall(sock, '\x00'*self.paddingLength) - -class FCGIApp(object): - def __init__(self, command=None, connect=None, host=None, port=None, - filterEnviron=True): - if host is not None: - assert port is not None - connect=(host, port) - - assert (command is not None and connect is None) or \ - (command is None and connect is not None) - - self._command = command - self._connect = connect - - self._filterEnviron = filterEnviron - - #sock = self._getConnection() - #print self._fcgiGetValues(sock, ['FCGI_MAX_CONNS', 'FCGI_MAX_REQS', 'FCGI_MPXS_CONNS']) - #sock.close() - - def __call__(self, environ, start_response): - # For sanity's sake, we don't care about FCGI_MPXS_CONN - # (connection multiplexing). For every request, we obtain a new - # transport socket, perform the request, then discard the socket. - # This is, I believe, how mod_fastcgi does things... - - sock = self._getConnection() - - # Since this is going to be the only request on this connection, - # set the request ID to 1. - requestId = 1 - - # Begin the request - rec = Record(FCGI_BEGIN_REQUEST, requestId) - rec.contentData = struct.pack(FCGI_BeginRequestBody, FCGI_RESPONDER, 0) - rec.contentLength = FCGI_BeginRequestBody_LEN - rec.write(sock) - - # Filter WSGI environ and send it as FCGI_PARAMS - if self._filterEnviron: - params = self._defaultFilterEnviron(environ) - else: - params = self._lightFilterEnviron(environ) - # TODO: Anything not from environ that needs to be sent also? 
- self._fcgiParams(sock, requestId, params) - self._fcgiParams(sock, requestId, {}) - - # Transfer wsgi.input to FCGI_STDIN - content_length = int(environ.get('CONTENT_LENGTH') or 0) - while True: - chunk_size = min(content_length, 4096) - s = environ['wsgi.input'].read(chunk_size) - content_length -= len(s) - rec = Record(FCGI_STDIN, requestId) - rec.contentData = s - rec.contentLength = len(s) - rec.write(sock) - - if not s: break - - # Empty FCGI_DATA stream - rec = Record(FCGI_DATA, requestId) - rec.write(sock) - - # Main loop. Process FCGI_STDOUT, FCGI_STDERR, FCGI_END_REQUEST - # records from the application. - result = [] - while True: - inrec = Record() - inrec.read(sock) - if inrec.type == FCGI_STDOUT: - if inrec.contentData: - result.append(inrec.contentData) - else: - # TODO: Should probably be pedantic and no longer - # accept FCGI_STDOUT records? - pass - elif inrec.type == FCGI_STDERR: - # Simply forward to wsgi.errors - environ['wsgi.errors'].write(inrec.contentData) - elif inrec.type == FCGI_END_REQUEST: - # TODO: Process appStatus/protocolStatus fields? - break - - # Done with this transport socket, close it. (FCGI_KEEP_CONN was not - # set in the FCGI_BEGIN_REQUEST record we sent above. So the - # application is expected to do the same.) - sock.close() - - result = ''.join(result) - - # Parse response headers from FCGI_STDOUT - status = '200 OK' - headers = [] - pos = 0 - while True: - eolpos = result.find('\n', pos) - if eolpos < 0: break - line = result[pos:eolpos-1] - pos = eolpos + 1 - - # strip in case of CR. NB: This will also strip other - # whitespace... - line = line.strip() - - # Empty line signifies end of headers - if not line: break - - # TODO: Better error handling - header, value = line.split(':', 1) - header = header.strip().lower() - value = value.strip() - - if header == 'status': - # Special handling of Status header - status = value - if status.find(' ') < 0: - # Append a dummy reason phrase if one was not provided - status += ' FCGIApp' - else: - headers.append((header, value)) - - result = result[pos:] - - # Set WSGI status, headers, and return result. - start_response(status, headers) - return [result] - - def _getConnection(self): - if self._connect is not None: - # The simple case. Create a socket and connect to the - # application. - if type(self._connect) is str: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - else: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(self._connect) - return sock - - # To be done when I have more time... 
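`FCGIApp` is itself a WSGI application: each call opens a fresh connection to the FastCGI backend, plays a single request and closes the socket. A minimal sketch of exposing such a backend through any WSGI server (the backend address below is hypothetical; Python 2 only):

```python
# Minimal sketch; the FastCGI backend address is hypothetical.
from wsgiref.simple_server import make_server
from nulib.ext.flup.client.fcgi_app import FCGIApp

app = FCGIApp(connect=('localhost', 9000))   # or connect='/path/to/backend.sock'
make_server('localhost', 8080, app).serve_forever()
```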
- raise NotImplementedError, 'Launching and managing FastCGI programs not yet implemented' - - def _fcgiGetValues(self, sock, vars): - # Construct FCGI_GET_VALUES record - outrec = Record(FCGI_GET_VALUES) - data = [] - for name in vars: - data.append(encode_pair(name, '')) - data = ''.join(data) - outrec.contentData = data - outrec.contentLength = len(data) - outrec.write(sock) - - # Await response - inrec = Record() - inrec.read(sock) - result = {} - if inrec.type == FCGI_GET_VALUES_RESULT: - pos = 0 - while pos < inrec.contentLength: - pos, (name, value) = decode_pair(inrec.contentData, pos) - result[name] = value - return result - - def _fcgiParams(self, sock, requestId, params): - rec = Record(FCGI_PARAMS, requestId) - data = [] - for name,value in params.items(): - data.append(encode_pair(name, value)) - data = ''.join(data) - rec.contentData = data - rec.contentLength = len(data) - rec.write(sock) - - _environPrefixes = ['SERVER_', 'HTTP_', 'REQUEST_', 'REMOTE_', 'PATH_', - 'CONTENT_'] - _environCopies = ['SCRIPT_NAME', 'QUERY_STRING', 'AUTH_TYPE'] - _environRenames = {} - - def _defaultFilterEnviron(self, environ): - result = {} - for n in environ.keys(): - for p in self._environPrefixes: - if n.startswith(p): - result[n] = environ[n] - if n in self._environCopies: - result[n] = environ[n] - if n in self._environRenames: - result[self._environRenames[n]] = environ[n] - - return result - - def _lightFilterEnviron(self, environ): - result = {} - for n in environ.keys(): - if n.upper() == n: - result[n] = environ[n] - return result - -if __name__ == '__main__': - from flup.server.ajp import WSGIServer - app = FCGIApp(connect=('localhost', 4242)) - #import paste.lint - #app = paste.lint.middleware(app) - WSGIServer(app).run() diff --git a/lib/nulib/python/nulib/ext/flup/client/scgi_app.py b/lib/nulib/python/nulib/ext/flup/client/scgi_app.py deleted file mode 100644 index c26cd58..0000000 --- a/lib/nulib/python/nulib/ext/flup/client/scgi_app.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright (c) 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. 
-# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import select -import struct -import socket -import errno - -__all__ = ['SCGIApp'] - -def encodeNetstring(s): - return ''.join([str(len(s)), ':', s, ',']) - -class SCGIApp(object): - def __init__(self, connect=None, host=None, port=None, - filterEnviron=True): - if host is not None: - assert port is not None - connect=(host, port) - - assert connect is not None - self._connect = connect - - self._filterEnviron = filterEnviron - - def __call__(self, environ, start_response): - sock = self._getConnection() - - outfile = sock.makefile('w') - infile = sock.makefile('r') - - sock.close() - - # Filter WSGI environ and send as request headers - if self._filterEnviron: - headers = self._defaultFilterEnviron(environ) - else: - headers = self._lightFilterEnviron(environ) - # TODO: Anything not from environ that needs to be sent also? - - content_length = int(environ.get('CONTENT_LENGTH') or 0) - if headers.has_key('CONTENT_LENGTH'): - del headers['CONTENT_LENGTH'] - - headers_out = ['CONTENT_LENGTH', str(content_length), 'SCGI', '1'] - for k,v in headers.items(): - headers_out.append(k) - headers_out.append(v) - headers_out.append('') # For trailing NUL - outfile.write(encodeNetstring('\x00'.join(headers_out))) - - # Transfer wsgi.input to outfile - while True: - chunk_size = min(content_length, 4096) - s = environ['wsgi.input'].read(chunk_size) - content_length -= len(s) - outfile.write(s) - - if not s: break - - outfile.close() - - # Read result from SCGI server - result = [] - while True: - buf = infile.read(4096) - if not buf: break - - result.append(buf) - - infile.close() - - result = ''.join(result) - - # Parse response headers - status = '200 OK' - headers = [] - pos = 0 - while True: - eolpos = result.find('\n', pos) - if eolpos < 0: break - line = result[pos:eolpos-1] - pos = eolpos + 1 - - # strip in case of CR. NB: This will also strip other - # whitespace... - line = line.strip() - - # Empty line signifies end of headers - if not line: break - - # TODO: Better error handling - header, value = line.split(':', 1) - header = header.strip().lower() - value = value.strip() - - if header == 'status': - # Special handling of Status header - status = value - if status.find(' ') < 0: - # Append a dummy reason phrase if one was not provided - status += ' SCGIApp' - else: - headers.append((header, value)) - - result = result[pos:] - - # Set WSGI status, headers, and return result. 
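SCGI frames the request headers as a single netstring (`<len>:<payload>,`) whose payload is the NUL-separated name/value list built in `__call__` above. The framing itself is trivial; a self-contained illustration:

```python
# Self-contained illustration of the netstring framing used by SCGIApp.
def encode_netstring(s):
    return ''.join([str(len(s)), ':', s, ','])

assert encode_netstring('hello') == '5:hello,'
assert encode_netstring('CONTENT_LENGTH\x000\x00SCGI\x001\x00') == \
       '24:CONTENT_LENGTH\x000\x00SCGI\x001\x00,'
```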
- start_response(status, headers) - return [result] - - def _getConnection(self): - if type(self._connect) is str: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - else: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(self._connect) - return sock - - _environPrefixes = ['SERVER_', 'HTTP_', 'REQUEST_', 'REMOTE_', 'PATH_', - 'CONTENT_'] - _environCopies = ['SCRIPT_NAME', 'QUERY_STRING', 'AUTH_TYPE'] - _environRenames = {} - - def _defaultFilterEnviron(self, environ): - result = {} - for n in environ.keys(): - for p in self._environPrefixes: - if n.startswith(p): - result[n] = environ[n] - if n in self._environCopies: - result[n] = environ[n] - if n in self._environRenames: - result[self._environRenames[n]] = environ[n] - - return result - - def _lightFilterEnviron(self, environ): - result = {} - for n in environ.keys(): - if n.upper() == n: - result[n] = environ[n] - return result - -if __name__ == '__main__': - from flup.server.ajp import WSGIServer - app = SCGIApp(connect=('localhost', 4000)) - #import paste.lint - #app = paste.lint.middleware(app) - WSGIServer(app).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/__init__.py b/lib/nulib/python/nulib/ext/flup/server/__init__.py deleted file mode 100644 index 792d600..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# diff --git a/lib/nulib/python/nulib/ext/flup/server/ajp.py b/lib/nulib/python/nulib/ext/flup/server/ajp.py deleted file mode 100644 index 3dca295..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/ajp.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -ajp - an AJP 1.3/WSGI gateway. - -For more information about AJP and AJP connectors for your web server, see -. - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - import sys - from myapplication import app # Assume app is your WSGI application object - from ajp import WSGIServer - ret = WSGIServer(app).run() - sys.exit(ret and 42 or 0) - -See the documentation for WSGIServer for more information. 
- -About the bit of logic at the end: -Upon receiving SIGHUP, the python script will exit with status code 42. This -can be used by a wrapper script to determine if the python script should be -re-run. When a SIGINT or SIGTERM is received, the script exits with status -code 0, possibly indicating a normal exit. - -Example wrapper script: - - #!/bin/sh - STATUS=42 - while test $STATUS -eq 42; do - python "$@" that_script_above.py - STATUS=$? - done - -Example workers.properties (for mod_jk): - - worker.list=foo - worker.foo.port=8009 - worker.foo.host=localhost - worker.foo.type=ajp13 - -Example httpd.conf (for mod_jk): - - JkWorkersFile /path/to/workers.properties - JkMount /* foo - -Note that if you mount your ajp application anywhere but the root ("/"), you -SHOULD specifiy scriptName to the WSGIServer constructor. This will ensure -that SCRIPT_NAME/PATH_INFO are correctly deduced. -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import socket -import logging - -from flup.server.ajp_base import BaseAJPServer, Connection -from flup.server.threadedserver import ThreadedServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseAJPServer, ThreadedServer): - """ - AJP1.3/WSGI server. Runs your WSGI application as a persistant program - that understands AJP1.3. Opens up a TCP socket, binds it, and then - waits for forwarded requests from your webserver. - - Why AJP? Two good reasons are that AJP provides load-balancing and - fail-over support. Personally, I just wanted something new to - implement. :) - - Of course you will need an AJP1.3 connector for your webserver (e.g. - mod_jk) - see . - """ - def __init__(self, application, scriptName='', environ=None, - multithreaded=True, multiprocess=False, - bindAddress=('localhost', 8009), allowedServers=None, - loggingLevel=logging.INFO, debug=True, **kw): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - bindAddress is the address to bind to, which must be a tuple of - length 2. The first element is a string, which is the host name - or IPv4 address of a local interface. The 2nd element is the port - number. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. - - loggingLevel sets the logging level of the module-level logger. - """ - BaseAJPServer.__init__(self, application, - scriptName=scriptName, - environ=environ, - multithreaded=multithreaded, - multiprocess=multiprocess, - bindAddress=bindAddress, - allowedServers=allowedServers, - loggingLevel=loggingLevel, - debug=debug) - for key in ('jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - ThreadedServer.__init__(self, jobClass=Connection, jobArgs=(self,), - **kw) - - def run(self): - """ - Main loop. Call this after instantiating WSGIServer. SIGHUP, SIGINT, - SIGQUIT, SIGTERM cause it to cleanup and return. (If a SIGHUP - is caught, this method returns True. Returns False otherwise.) 
- """ - self.logger.info('%s starting up', self.__class__.__name__) - - try: - sock = self._setupSocket() - except socket.error, e: - self.logger.error('Failed to bind socket (%s), exiting', e[1]) - return False - - ret = ThreadedServer.run(self, sock) - - self._cleanupSocket(sock) - - self.logger.info('%s shutting down%s', self.__class__.__name__, - self._hupReceived and ' (reload requested)' or '') - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \ - '<table border="1">' - names = environ.keys() - names.sort() - for name in names: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - name, cgi.escape(`environ[name]`)) - - form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, - keep_blank_values=1) - if form.list: - yield '<tr><th colspan="2">Form data</th></tr>' - - for field in form.list: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - field.name, field.value) - - yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - # Explicitly set bindAddress to *:8009 for testing. - WSGIServer(test_app, - bindAddress=('', 8009), allowedServers=None, - loggingLevel=logging.DEBUG).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/ajp_base.py b/lib/nulib/python/nulib/ext/flup/server/ajp_base.py deleted file mode 100644 index 2acff01..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/ajp_base.py +++ /dev/null @@ -1,956 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import socket -import select -import struct -import signal -import logging -import errno -import datetime -import time - -# Unfortunately, for now, threads are required. -import thread -import threading - -__all__ = ['BaseAJPServer'] - -class NoDefault(object): - pass - -# Packet header prefixes. -SERVER_PREFIX = '\x12\x34' -CONTAINER_PREFIX = 'AB' - -# Server packet types. -PKTTYPE_FWD_REQ = '\x02' -PKTTYPE_SHUTDOWN = '\x07' -PKTTYPE_PING = '\x08' -PKTTYPE_CPING = '\x0a' - -# Container packet types. -PKTTYPE_SEND_BODY = '\x03' -PKTTYPE_SEND_HEADERS = '\x04' -PKTTYPE_END_RESPONSE = '\x05' -PKTTYPE_GET_BODY = '\x06' -PKTTYPE_CPONG = '\x09' - -# Code tables for methods/headers/attributes. 
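The constants above describe AJP1.3's packet framing: requests from the web server start with the magic '\x12\x34', replies from the container with 'AB', each followed by a 2-byte big-endian payload length; this is what Packet.read()/write() below implement. A small Python 3 illustration of the container-side framing, not part of the removed code:

```python
import struct

CONTAINER_PREFIX = b'AB'   # container -> web server magic
PKTTYPE_CPONG = b'\x09'    # reply to the server's CPING

def frame_container_packet(payload: bytes) -> bytes:
    # 2-byte magic, 2-byte big-endian length, then the payload itself.
    return CONTAINER_PREFIX + struct.pack('>H', len(payload)) + payload

# A CPONG reply is a single type byte with no body:
assert frame_container_packet(PKTTYPE_CPONG) == b'AB\x00\x01\x09'
```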
-methodTable = [ - None, - 'OPTIONS', - 'GET', - 'HEAD', - 'POST', - 'PUT', - 'DELETE', - 'TRACE', - 'PROPFIND', - 'PROPPATCH', - 'MKCOL', - 'COPY', - 'MOVE', - 'LOCK', - 'UNLOCK', - 'ACL', - 'REPORT', - 'VERSION-CONTROL', - 'CHECKIN', - 'CHECKOUT', - 'UNCHECKOUT', - 'SEARCH', - 'MKWORKSPACE', - 'UPDATE', - 'LABEL', - 'MERGE', - 'BASELINE_CONTROL', - 'MKACTIVITY' - ] - -requestHeaderTable = [ - None, - 'Accept', - 'Accept-Charset', - 'Accept-Encoding', - 'Accept-Language', - 'Authorization', - 'Connection', - 'Content-Type', - 'Content-Length', - 'Cookie', - 'Cookie2', - 'Host', - 'Pragma', - 'Referer', - 'User-Agent' - ] - -attributeTable = [ - None, - 'CONTEXT', - 'SERVLET_PATH', - 'REMOTE_USER', - 'AUTH_TYPE', - 'QUERY_STRING', - 'JVM_ROUTE', - 'SSL_CERT', - 'SSL_CIPHER', - 'SSL_SESSION', - None, # name follows - 'SSL_KEY_SIZE' - ] - -responseHeaderTable = [ - None, - 'content-type', - 'content-language', - 'content-length', - 'date', - 'last-modified', - 'location', - 'set-cookie', - 'set-cookie2', - 'servlet-engine', - 'status', - 'www-authenticate' - ] - -# The main classes use this name for logging. -LoggerName = 'ajp-wsgi' - -# Set up module-level logger. -console = logging.StreamHandler() -console.setLevel(logging.DEBUG) -console.setFormatter(logging.Formatter('%(asctime)s : %(message)s', - '%Y-%m-%d %H:%M:%S')) -logging.getLogger(LoggerName).addHandler(console) -del console - -class ProtocolError(Exception): - """ - Exception raised when the server does something unexpected or - sends garbled data. Usually leads to a Connection closing. - """ - pass - -def decodeString(data, pos=0): - """Decode a string.""" - try: - length = struct.unpack('>H', data[pos:pos+2])[0] - pos += 2 - if length == 0xffff: # This was undocumented! - return '', pos - s = data[pos:pos+length] - return s, pos+length+1 # Don't forget NUL - except Exception, e: - raise ProtocolError, 'decodeString: '+str(e) - -def decodeRequestHeader(data, pos=0): - """Decode a request header/value pair.""" - try: - if data[pos] == '\xa0': - # Use table - i = ord(data[pos+1]) - name = requestHeaderTable[i] - if name is None: - raise ValueError, 'bad request header code' - pos += 2 - else: - name, pos = decodeString(data, pos) - value, pos = decodeString(data, pos) - return name, value, pos - except Exception, e: - raise ProtocolError, 'decodeRequestHeader: '+str(e) - -def decodeAttribute(data, pos=0): - """Decode a request attribute.""" - try: - i = ord(data[pos]) - pos += 1 - if i == 0xff: - # end - return None, None, pos - elif i == 0x0a: - # name follows - name, pos = decodeString(data, pos) - elif i == 0x0b: - # Special handling of SSL_KEY_SIZE. - name = attributeTable[i] - # Value is an int, not a string. 
- value = struct.unpack('>H', data[pos:pos+2])[0] - return name, str(value), pos+2 - else: - name = attributeTable[i] - if name is None: - raise ValueError, 'bad attribute code' - value, pos = decodeString(data, pos) - return name, value, pos - except Exception, e: - raise ProtocolError, 'decodeAttribute: '+str(e) - -def encodeString(s): - """Encode a string.""" - return struct.pack('>H', len(s)) + s + '\x00' - -def encodeResponseHeader(name, value): - """Encode a response header/value pair.""" - lname = name.lower() - if lname in responseHeaderTable: - # Use table - i = responseHeaderTable.index(lname) - out = '\xa0' + chr(i) - else: - out = encodeString(name) - out += encodeString(value) - return out - -class Packet(object): - """An AJP message packet.""" - def __init__(self): - self.data = '' - # Don't set this on write, it will be calculated automatically. - self.length = 0 - - def _recvall(sock, length): - """ - Attempts to receive length bytes from a socket, blocking if necessary. - (Socket may be blocking or non-blocking.) - """ - dataList = [] - recvLen = 0 - while length: - try: - data = sock.recv(length) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([sock], [], []) - continue - else: - raise - if not data: # EOF - break - dataList.append(data) - dataLen = len(data) - recvLen += dataLen - length -= dataLen - return ''.join(dataList), recvLen - _recvall = staticmethod(_recvall) - - def read(self, sock): - """Attempt to read a packet from the server.""" - try: - header, length = self._recvall(sock, 4) - except socket.error: - # Treat any sort of socket errors as EOF (close Connection). - raise EOFError - - if length < 4: - raise EOFError - - if header[:2] != SERVER_PREFIX: - raise ProtocolError, 'invalid header' - - self.length = struct.unpack('>H', header[2:4])[0] - if self.length: - try: - self.data, length = self._recvall(sock, self.length) - except socket.error: - raise EOFError - - if length < self.length: - raise EOFError - - def _sendall(sock, data): - """ - Writes data to a socket and does not return until all the data is sent. - """ - length = len(data) - while length: - try: - sent = sock.send(data) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([], [sock], []) - continue - else: - raise - data = data[sent:] - length -= sent - _sendall = staticmethod(_sendall) - - def write(self, sock): - """Send a packet to the server.""" - self.length = len(self.data) - self._sendall(sock, CONTAINER_PREFIX + struct.pack('>H', self.length)) - if self.length: - self._sendall(sock, self.data) - -class InputStream(object): - """ - File-like object that represents the request body (if any). Supports - the bare mininum methods required by the WSGI spec. Thanks to - StringIO for ideas. - """ - def __init__(self, conn): - self._conn = conn - - # See WSGIServer. - self._shrinkThreshold = conn.server.inputStreamShrinkThreshold - - self._buf = '' - self._bufList = [] - self._pos = 0 # Current read position. - self._avail = 0 # Number of bytes currently available. - self._length = 0 # Set to Content-Length in request. 
- - self.logger = logging.getLogger(LoggerName) - - def bytesAvailForAdd(self): - return self._length - self._avail - - def _shrinkBuffer(self): - """Gets rid of already read data (since we can't rewind).""" - if self._pos >= self._shrinkThreshold: - self._buf = self._buf[self._pos:] - self._avail -= self._pos - self._length -= self._pos - self._pos = 0 - - assert self._avail >= 0 and self._length >= 0 - - def _waitForData(self): - toAdd = min(self.bytesAvailForAdd(), 0xffff) - assert toAdd > 0 - pkt = Packet() - pkt.data = PKTTYPE_GET_BODY + \ - struct.pack('>H', toAdd) - self._conn.writePacket(pkt) - self._conn.processInput() - - def read(self, n=-1): - if self._pos == self._length: - return '' - while True: - if n < 0 or (self._avail - self._pos) < n: - # Not enough data available. - if not self.bytesAvailForAdd(): - # And there's no more coming. - newPos = self._avail - break - else: - # Ask for more data and wait. - self._waitForData() - continue - else: - newPos = self._pos + n - break - # Merge buffer list, if necessary. - if self._bufList: - self._buf += ''.join(self._bufList) - self._bufList = [] - r = self._buf[self._pos:newPos] - self._pos = newPos - self._shrinkBuffer() - return r - - def readline(self, length=None): - if self._pos == self._length: - return '' - while True: - # Unfortunately, we need to merge the buffer list early. - if self._bufList: - self._buf += ''.join(self._bufList) - self._bufList = [] - # Find newline. - i = self._buf.find('\n', self._pos) - if i < 0: - # Not found? - if not self.bytesAvailForAdd(): - # No more data coming. - newPos = self._avail - break - else: - if length is not None and len(self._buf) >= length + self._pos: - newPos = self._pos + length - break - # Wait for more to come. - self._waitForData() - continue - else: - newPos = i + 1 - break - r = self._buf[self._pos:newPos] - self._pos = newPos - self._shrinkBuffer() - return r - - def readlines(self, sizehint=0): - total = 0 - lines = [] - line = self.readline() - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline() - return lines - - def __iter__(self): - return self - - def next(self): - r = self.readline() - if not r: - raise StopIteration - return r - - def setDataLength(self, length): - """ - Once Content-Length is known, Request calls this method to set it. - """ - self._length = length - - def addData(self, data): - """ - Adds data from the server to this InputStream. Note that we never ask - the server for data beyond the Content-Length, so the server should - never send us an EOF (empty string argument). - """ - if not data: - raise ProtocolError, 'short data' - self._bufList.append(data) - length = len(data) - self._avail += length - if self._avail > self._length: - raise ProtocolError, 'too much data' - -class Request(object): - """ - A Request object. A more fitting name would probably be Transaction, but - it's named Request to mirror my FastCGI driver. :) This object - encapsulates all the data about the HTTP request and allows the handler - to send a response. - - The only attributes/methods that the handler should concern itself - with are: environ, input, startResponse(), and write(). - """ - # Do not ever change the following value. 
- _maxWrite = 8192 - 4 - 3 - 1 # 8k - pkt header - send body header - NUL - - def __init__(self, conn): - self._conn = conn - - self.environ = {} - self.input = InputStream(conn) - - self._headersSent = False - - self.logger = logging.getLogger(LoggerName) - - def run(self): - self.logger.info('%s %s', - self.environ['REQUEST_METHOD'], - self.environ['REQUEST_URI']) - - start = datetime.datetime.now() - - try: - self._conn.server.handler(self) - except: - self.logger.exception('Exception caught from handler') - if not self._headersSent: - self._conn.server.error(self) - - end = datetime.datetime.now() - - # Notify server of end of response (reuse flag is set to true). - pkt = Packet() - pkt.data = PKTTYPE_END_RESPONSE + '\x01' - self._conn.writePacket(pkt) - - handlerTime = end - start - self.logger.debug('%s %s done (%.3f secs)', - self.environ['REQUEST_METHOD'], - self.environ['REQUEST_URI'], - handlerTime.seconds + - handlerTime.microseconds / 1000000.0) - - # The following methods are called from the Connection to set up this - # Request. - - def setMethod(self, value): - self.environ['REQUEST_METHOD'] = value - - def setProtocol(self, value): - self.environ['SERVER_PROTOCOL'] = value - - def setRequestURI(self, value): - self.environ['REQUEST_URI'] = value - - def setRemoteAddr(self, value): - self.environ['REMOTE_ADDR'] = value - - def setRemoteHost(self, value): - self.environ['REMOTE_HOST'] = value - - def setServerName(self, value): - self.environ['SERVER_NAME'] = value - - def setServerPort(self, value): - self.environ['SERVER_PORT'] = str(value) - - def setIsSSL(self, value): - if value: - self.environ['HTTPS'] = 'on' - - def addHeader(self, name, value): - name = name.replace('-', '_').upper() - if name in ('CONTENT_TYPE', 'CONTENT_LENGTH'): - self.environ[name] = value - if name == 'CONTENT_LENGTH': - length = int(value) - self.input.setDataLength(length) - else: - self.environ['HTTP_'+name] = value - - def addAttribute(self, name, value): - self.environ[name] = value - - # The only two methods that should be called from the handler. - - def startResponse(self, statusCode, statusMsg, headers): - """ - Begin the HTTP response. This must only be called once and it - must be called before any calls to write(). - - statusCode is the integer status code (e.g. 200). statusMsg - is the associated reason message (e.g.'OK'). headers is a list - of 2-tuples - header name/value pairs. (Both header name and value - must be strings.) - """ - assert not self._headersSent, 'Headers already sent!' - - pkt = Packet() - pkt.data = PKTTYPE_SEND_HEADERS + \ - struct.pack('>H', statusCode) + \ - encodeString(statusMsg) + \ - struct.pack('>H', len(headers)) + \ - ''.join([encodeResponseHeader(name, value) - for name,value in headers]) - - self._conn.writePacket(pkt) - - self._headersSent = True - - def write(self, data): - """ - Write data (which comprises the response body). Note that due to - restrictions on AJP packet size, we limit our writes to 8185 bytes - each packet. - """ - assert self._headersSent, 'Headers must be sent first!' - - bytesLeft = len(data) - while bytesLeft: - toWrite = min(bytesLeft, self._maxWrite) - - pkt = Packet() - pkt.data = PKTTYPE_SEND_BODY + \ - struct.pack('>H', toWrite) + \ - data[:toWrite] + '\x00' # Undocumented - self._conn.writePacket(pkt) - - data = data[toWrite:] - bytesLeft -= toWrite - -class Connection(object): - """ - A single Connection with the server. 
Requests are not multiplexed over the - same connection, so at any given time, the Connection is either - waiting for a request, or processing a single request. - """ - def __init__(self, sock, addr, server): - self.server = server - self._sock = sock - self._addr = addr - - self._request = None - - self.logger = logging.getLogger(LoggerName) - - def run(self): - self.logger.debug('Connection starting up (%s:%d)', - self._addr[0], self._addr[1]) - - # Main loop. Errors will cause the loop to be exited and - # the socket to be closed. - while True: - try: - self.processInput() - except ProtocolError, e: - self.logger.error("Protocol error '%s'", str(e)) - break - except (EOFError, KeyboardInterrupt): - break - except: - self.logger.exception('Exception caught in Connection') - break - - self.logger.debug('Connection shutting down (%s:%d)', - self._addr[0], self._addr[1]) - - self._sock.close() - - def processInput(self): - """Wait for and process a single packet.""" - pkt = Packet() - select.select([self._sock], [], []) - pkt.read(self._sock) - - # Body chunks have no packet type code. - if self._request is not None: - self._processBody(pkt) - return - - if not pkt.length: - raise ProtocolError, 'unexpected empty packet' - - pkttype = pkt.data[0] - if pkttype == PKTTYPE_FWD_REQ: - self._forwardRequest(pkt) - elif pkttype == PKTTYPE_SHUTDOWN: - self._shutdown(pkt) - elif pkttype == PKTTYPE_PING: - self._ping(pkt) - elif pkttype == PKTTYPE_CPING: - self._cping(pkt) - else: - raise ProtocolError, 'unknown packet type' - - def _forwardRequest(self, pkt): - """ - Creates a Request object, fills it in from the packet, then runs it. - """ - assert self._request is None - - req = self.server.requestClass(self) - i = ord(pkt.data[1]) - method = methodTable[i] - if method is None: - raise ValueError, 'bad method field' - req.setMethod(method) - value, pos = decodeString(pkt.data, 2) - req.setProtocol(value) - value, pos = decodeString(pkt.data, pos) - req.setRequestURI(value) - value, pos = decodeString(pkt.data, pos) - req.setRemoteAddr(value) - value, pos = decodeString(pkt.data, pos) - req.setRemoteHost(value) - value, pos = decodeString(pkt.data, pos) - req.setServerName(value) - value = struct.unpack('>H', pkt.data[pos:pos+2])[0] - req.setServerPort(value) - i = ord(pkt.data[pos+2]) - req.setIsSSL(i != 0) - - # Request headers. - numHeaders = struct.unpack('>H', pkt.data[pos+3:pos+5])[0] - pos += 5 - for i in range(numHeaders): - name, value, pos = decodeRequestHeader(pkt.data, pos) - req.addHeader(name, value) - - # Attributes. - while True: - name, value, pos = decodeAttribute(pkt.data, pos) - if name is None: - break - req.addAttribute(name, value) - - self._request = req - - # Read first body chunk, if needed. - if req.input.bytesAvailForAdd(): - self.processInput() - - # Run Request. - req.run() - - self._request = None - - def _shutdown(self, pkt): - """Not sure what to do with this yet.""" - self.logger.info('Received shutdown request from server') - - def _ping(self, pkt): - """I have no idea what this packet means.""" - self.logger.debug('Received ping') - - def _cping(self, pkt): - """Respond to a PING (CPING) packet.""" - self.logger.debug('Received PING, sending PONG') - pkt = Packet() - pkt.data = PKTTYPE_CPONG - self.writePacket(pkt) - - def _processBody(self, pkt): - """ - Handles a body chunk from the server by appending it to the - InputStream. 
- """ - if pkt.length: - length = struct.unpack('>H', pkt.data[:2])[0] - self._request.input.addData(pkt.data[2:2+length]) - else: - # Shouldn't really ever get here. - self._request.input.addData('') - - def writePacket(self, pkt): - """Sends a Packet to the server.""" - pkt.write(self._sock) - -class BaseAJPServer(object): - # What Request class to use. - requestClass = Request - - # Limits the size of the InputStream's string buffer to this size + 8k. - # Since the InputStream is not seekable, we throw away already-read - # data once this certain amount has been read. (The 8k is there because - # it is the maximum size of new data added per chunk.) - inputStreamShrinkThreshold = 102400 - 8192 - - def __init__(self, application, scriptName='', environ=None, - multithreaded=True, multiprocess=False, - bindAddress=('localhost', 8009), allowedServers=NoDefault, - loggingLevel=logging.INFO, debug=True): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - Set multithreaded to False if your application is not thread-safe. - - Set multiprocess to True to explicitly set wsgi.multiprocess to - True. (Only makes sense with threaded servers.) - - bindAddress is the address to bind to, which must be a tuple of - length 2. The first element is a string, which is the host name - or IPv4 address of a local interface. The 2nd element is the port - number. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. By default, it is a list containing - the single item '127.0.0.1'. - - loggingLevel sets the logging level of the module-level logger. - """ - if environ is None: - environ = {} - - self.application = application - self.scriptName = scriptName - self.environ = environ - self.multithreaded = multithreaded - self.multiprocess = multiprocess - self.debug = debug - self._bindAddress = bindAddress - if allowedServers is NoDefault: - allowedServers = ['127.0.0.1'] - self._allowedServers = allowedServers - - # Used to force single-threadedness. - self._appLock = thread.allocate_lock() - - self.logger = logging.getLogger(LoggerName) - self.logger.setLevel(loggingLevel) - - def _setupSocket(self): - """Creates and binds the socket for communication with the server.""" - sock = socket.socket() - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(self._bindAddress) - sock.listen(socket.SOMAXCONN) - return sock - - def _cleanupSocket(self, sock): - """Closes the main socket.""" - sock.close() - - def _isClientAllowed(self, addr): - ret = self._allowedServers is None or addr[0] in self._allowedServers - if not ret: - self.logger.warning('Server connection from %s disallowed', - addr[0]) - return ret - - def handler(self, request): - """ - WSGI handler. Sets up WSGI environment, calls the application, - and sends the application's response. 
- """ - environ = request.environ - environ.update(self.environ) - - environ['wsgi.version'] = (1,0) - environ['wsgi.input'] = request.input - environ['wsgi.errors'] = sys.stderr - environ['wsgi.multithread'] = self.multithreaded - environ['wsgi.multiprocess'] = self.multiprocess - environ['wsgi.run_once'] = False - - if environ.get('HTTPS', 'off') in ('on', '1'): - environ['wsgi.url_scheme'] = 'https' - else: - environ['wsgi.url_scheme'] = 'http' - - self._sanitizeEnv(environ) - - headers_set = [] - headers_sent = [] - result = None - - def write(data): - assert type(data) is str, 'write() argument must be string' - assert headers_set, 'write() before start_response()' - - if not headers_sent: - status, responseHeaders = headers_sent[:] = headers_set - statusCode = int(status[:3]) - statusMsg = status[4:] - found = False - for header,value in responseHeaders: - if header.lower() == 'content-length': - found = True - break - if not found and result is not None: - try: - if len(result) == 1: - responseHeaders.append(('Content-Length', - str(len(data)))) - except: - pass - request.startResponse(statusCode, statusMsg, responseHeaders) - - request.write(data) - - def start_response(status, response_headers, exc_info=None): - if exc_info: - try: - if headers_sent: - # Re-raise if too late - raise exc_info[0], exc_info[1], exc_info[2] - finally: - exc_info = None # avoid dangling circular ref - else: - assert not headers_set, 'Headers already set!' - - assert type(status) is str, 'Status must be a string' - assert len(status) >= 4, 'Status must be at least 4 characters' - assert int(status[:3]), 'Status must begin with 3-digit code' - assert status[3] == ' ', 'Status must have a space after code' - assert type(response_headers) is list, 'Headers must be a list' - if __debug__: - for name,val in response_headers: - assert type(name) is str, 'Header name "%s" must be a string' % name - assert type(val) is str, 'Value of header "%s" must be a string' % name - - headers_set[:] = [status, response_headers] - return write - - if not self.multithreaded: - self._appLock.acquire() - try: - try: - result = self.application(environ, start_response) - try: - for data in result: - if data: - write(data) - if not headers_sent: - write('') # in case body was empty - finally: - if hasattr(result, 'close'): - result.close() - except socket.error, e: - if e[0] != errno.EPIPE: - raise # Don't let EPIPE propagate beyond server - finally: - if not self.multithreaded: - self._appLock.release() - - def _sanitizeEnv(self, environ): - """Fill-in/deduce missing values in environ.""" - # Namely SCRIPT_NAME/PATH_INFO - value = environ['REQUEST_URI'] - scriptName = environ.get('WSGI_SCRIPT_NAME', self.scriptName) - if not value.startswith(scriptName): - self.logger.warning('scriptName does not match request URI') - - environ['PATH_INFO'] = value[len(scriptName):] - environ['SCRIPT_NAME'] = scriptName - - reqUri = None - if environ.has_key('REQUEST_URI'): - reqUri = environ['REQUEST_URI'].split('?', 1) - - if not environ.has_key('QUERY_STRING') or not environ['QUERY_STRING']: - if reqUri is not None and len(reqUri) > 1: - environ['QUERY_STRING'] = reqUri[1] - else: - environ['QUERY_STRING'] = '' - - def error(self, request): - """ - Override to provide custom error handling. Ideally, however, - all errors should be caught at the application level. 
- """ - if self.debug: - request.startResponse(200, 'OK', [('Content-Type', 'text/html')]) - import cgitb - request.write(cgitb.html(sys.exc_info())) - else: - errorpage = """ - -Unhandled Exception - -

<h1>Unhandled Exception</h1> -<p>An unhandled exception was thrown by the application.</p> -</body></html>
- -""" - request.startResponse(200, 'OK', [('Content-Type', 'text/html')]) - request.write(errorpage) diff --git a/lib/nulib/python/nulib/ext/flup/server/ajp_fork.py b/lib/nulib/python/nulib/ext/flup/server/ajp_fork.py deleted file mode 100644 index 111b29c..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/ajp_fork.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -ajp - an AJP 1.3/WSGI gateway. - -For more information about AJP and AJP connectors for your web server, see -. - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - import sys - from myapplication import app # Assume app is your WSGI application object - from ajp import WSGIServer - ret = WSGIServer(app).run() - sys.exit(ret and 42 or 0) - -See the documentation for WSGIServer for more information. - -About the bit of logic at the end: -Upon receiving SIGHUP, the python script will exit with status code 42. This -can be used by a wrapper script to determine if the python script should be -re-run. When a SIGINT or SIGTERM is received, the script exits with status -code 0, possibly indicating a normal exit. - -Example wrapper script: - - #!/bin/sh - STATUS=42 - while test $STATUS -eq 42; do - python "$@" that_script_above.py - STATUS=$? - done - -Example workers.properties (for mod_jk): - - worker.list=foo - worker.foo.port=8009 - worker.foo.host=localhost - worker.foo.type=ajp13 - -Example httpd.conf (for mod_jk): - - JkWorkersFile /path/to/workers.properties - JkMount /* foo - -Note that if you mount your ajp application anywhere but the root ("/"), you -SHOULD specifiy scriptName to the WSGIServer constructor. This will ensure -that SCRIPT_NAME/PATH_INFO are correctly deduced. -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import socket -import logging - -from flup.server.ajp_base import BaseAJPServer, Connection -from flup.server.preforkserver import PreforkServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseAJPServer, PreforkServer): - """ - AJP1.3/WSGI server. Runs your WSGI application as a persistant program - that understands AJP1.3. 
Opens up a TCP socket, binds it, and then - waits for forwarded requests from your webserver. - - Why AJP? Two good reasons are that AJP provides load-balancing and - fail-over support. Personally, I just wanted something new to - implement. :) - - Of course you will need an AJP1.3 connector for your webserver (e.g. - mod_jk) - see . - """ - def __init__(self, application, scriptName='', environ=None, - bindAddress=('localhost', 8009), allowedServers=None, - loggingLevel=logging.INFO, debug=True, **kw): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - bindAddress is the address to bind to, which must be a tuple of - length 2. The first element is a string, which is the host name - or IPv4 address of a local interface. The 2nd element is the port - number. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. - - loggingLevel sets the logging level of the module-level logger. - """ - BaseAJPServer.__init__(self, application, - scriptName=scriptName, - environ=environ, - multithreaded=False, - multiprocess=True, - bindAddress=bindAddress, - allowedServers=allowedServers, - loggingLevel=loggingLevel, - debug=debug) - for key in ('multithreaded', 'multiprocess', 'jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - PreforkServer.__init__(self, jobClass=Connection, jobArgs=(self,), **kw) - - def run(self): - """ - Main loop. Call this after instantiating WSGIServer. SIGHUP, SIGINT, - SIGQUIT, SIGTERM cause it to cleanup and return. (If a SIGHUP - is caught, this method returns True. Returns False otherwise.) - """ - self.logger.info('%s starting up', self.__class__.__name__) - - try: - sock = self._setupSocket() - except socket.error, e: - self.logger.error('Failed to bind socket (%s), exiting', e[1]) - return False - - ret = PreforkServer.run(self, sock) - - self._cleanupSocket(sock) - - self.logger.info('%s shutting down%s', self.__class__.__name__, - self._hupReceived and ' (reload requested)' or '') - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \ - '<table border="1">' - names = environ.keys() - names.sort() - for name in names: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - name, cgi.escape(`environ[name]`)) - - form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, - keep_blank_values=1) - if form.list: - yield '<tr><th colspan="2">Form data</th></tr>' - - for field in form.list: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - field.name, field.value) - - yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - # Explicitly set bindAddress to *:8009 for testing. - WSGIServer(test_app, - bindAddress=('', 8009), allowedServers=None, - loggingLevel=logging.DEBUG).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/cgi.py b/lib/nulib/python/nulib/ext/flup/server/cgi.py deleted file mode 100644 index 17cc3ca..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/cgi.py +++ /dev/null @@ -1,71 +0,0 @@ -# Taken from -# which was placed in the public domain. - -import os, sys - - -__all__ = ['WSGIServer'] - - -class WSGIServer(object): - - def __init__(self, application): - self.application = application - - def run(self): - - environ = dict(os.environ.items()) - environ['wsgi.input'] = sys.stdin - environ['wsgi.errors'] = sys.stderr - environ['wsgi.version'] = (1,0) - environ['wsgi.multithread'] = False - environ['wsgi.multiprocess'] = True - environ['wsgi.run_once'] = True - - if environ.get('HTTPS','off') in ('on','1'): - environ['wsgi.url_scheme'] = 'https' - else: - environ['wsgi.url_scheme'] = 'http' - - headers_set = [] - headers_sent = [] - - def write(data): - if not headers_set: - raise AssertionError("write() before start_response()") - - elif not headers_sent: - # Before the first output, send the stored headers - status, response_headers = headers_sent[:] = headers_set - sys.stdout.write('Status: %s\r\n' % status) - for header in response_headers: - sys.stdout.write('%s: %s\r\n' % header) - sys.stdout.write('\r\n') - - sys.stdout.write(data) - sys.stdout.flush() - - def start_response(status,response_headers,exc_info=None): - if exc_info: - try: - if headers_sent: - # Re-raise original exception if headers sent - raise exc_info[0], exc_info[1], exc_info[2] - finally: - exc_info = None # avoid dangling circular ref - elif headers_set: - raise AssertionError("Headers already set!") - - headers_set[:] = [status,response_headers] - return write - - result = self.application(environ, start_response) - try: - for data in result: - if data: # don't send headers until body appears - write(data) - if not headers_sent: - write('') # send headers now if body was empty - finally: - if hasattr(result,'close'): - result.close() diff --git a/lib/nulib/python/nulib/ext/flup/server/fcgi.py b/lib/nulib/python/nulib/ext/flup/server/fcgi.py deleted file mode 100644 index ab160e9..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/fcgi.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -fcgi - a FastCGI/WSGI gateway. - -For more information about FastCGI, see . - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - from myapplication import app # Assume app is your WSGI application object - from fcgi import WSGIServer - WSGIServer(app).run() - -See the documentation for WSGIServer for more information. - -On most platforms, fcgi will fallback to regular CGI behavior if run in a -non-FastCGI context. If you want to force CGI behavior, set the environment -variable FCGI_FORCE_CGI to "Y" or "y". -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import os - -from flup.server.fcgi_base import BaseFCGIServer, FCGI_RESPONDER -from flup.server.threadedserver import ThreadedServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseFCGIServer, ThreadedServer): - """ - FastCGI server that supports the Web Server Gateway Interface. See - . - """ - def __init__(self, application, environ=None, - multithreaded=True, multiprocess=False, - bindAddress=None, umask=None, multiplexed=False, - debug=True, roles=(FCGI_RESPONDER,), forceCGI=False, **kw): - """ - environ, if present, must be a dictionary-like object. Its - contents will be copied into application's environ. Useful - for passing application-specific variables. - - bindAddress, if present, must either be a string or a 2-tuple. If - present, run() will open its own listening socket. You would use - this if you wanted to run your application as an 'external' FastCGI - app. (i.e. the webserver would no longer be responsible for starting - your app) If a string, it will be interpreted as a filename and a UNIX - socket will be opened. If a tuple, the first element, a string, - is the interface name/IP to bind to, and the second element (an int) - is the port number. - """ - BaseFCGIServer.__init__(self, application, - environ=environ, - multithreaded=multithreaded, - multiprocess=multiprocess, - bindAddress=bindAddress, - umask=umask, - multiplexed=multiplexed, - debug=debug, - roles=roles, - forceCGI=forceCGI) - for key in ('jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - ThreadedServer.__init__(self, jobClass=self._connectionClass, - jobArgs=(self,), **kw) - - def _isClientAllowed(self, addr): - return self._web_server_addrs is None or \ - (len(addr) == 2 and addr[0] in self._web_server_addrs) - - def run(self): - """ - The main loop. Exits on SIGHUP, SIGINT, SIGTERM. Returns True if - SIGHUP was received, False otherwise. 
- """ - self._web_server_addrs = os.environ.get('FCGI_WEB_SERVER_ADDRS') - if self._web_server_addrs is not None: - self._web_server_addrs = map(lambda x: x.strip(), - self._web_server_addrs.split(',')) - - sock = self._setupSocket() - - ret = ThreadedServer.run(self, sock) - - self._cleanupSocket(sock) - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \ - '<table border="1">' - names = environ.keys() - names.sort() - for name in names: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - name, cgi.escape(`environ[name]`)) - - form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, - keep_blank_values=1) - if form.list: - yield '<tr><th colspan="2">Form data</th></tr>' - - for field in form.list: - yield '<tr><td>%s</td><td>%s</td></tr>\n' % ( - field.name, field.value) - - yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - WSGIServer(test_app).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/fcgi_base.py b/lib/nulib/python/nulib/ext/flup/server/fcgi_base.py deleted file mode 100644 index 31e0ac6..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/fcgi_base.py +++ /dev/null @@ -1,1188 +0,0 @@ -# Copyright (c) 2002, 2003, 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import os -import signal -import struct -import cStringIO as StringIO -import select -import socket -import errno -import traceback - -try: - import thread - import threading - thread_available = True -except ImportError: - import dummy_thread as thread - import dummy_threading as threading - thread_available = False - -# Apparently 2.3 doesn't define SHUT_WR? Assume it is 1 in this case. -if not hasattr(socket, 'SHUT_WR'): - socket.SHUT_WR = 1 - -__all__ = ['BaseFCGIServer'] - -# Constants from the spec. -FCGI_LISTENSOCK_FILENO = 0 - -FCGI_HEADER_LEN = 8 - -FCGI_VERSION_1 = 1 - -FCGI_BEGIN_REQUEST = 1 -FCGI_ABORT_REQUEST = 2 -FCGI_END_REQUEST = 3 -FCGI_PARAMS = 4 -FCGI_STDIN = 5 -FCGI_STDOUT = 6 -FCGI_STDERR = 7 -FCGI_DATA = 8 -FCGI_GET_VALUES = 9 -FCGI_GET_VALUES_RESULT = 10 -FCGI_UNKNOWN_TYPE = 11 -FCGI_MAXTYPE = FCGI_UNKNOWN_TYPE - -FCGI_NULL_REQUEST_ID = 0 - -FCGI_KEEP_CONN = 1 - -FCGI_RESPONDER = 1 -FCGI_AUTHORIZER = 2 -FCGI_FILTER = 3 - -FCGI_REQUEST_COMPLETE = 0 -FCGI_CANT_MPX_CONN = 1 -FCGI_OVERLOADED = 2 -FCGI_UNKNOWN_ROLE = 3 - -FCGI_MAX_CONNS = 'FCGI_MAX_CONNS' -FCGI_MAX_REQS = 'FCGI_MAX_REQS' -FCGI_MPXS_CONNS = 'FCGI_MPXS_CONNS' - -FCGI_Header = '!BBHHBx' -FCGI_BeginRequestBody = '!HB5x' -FCGI_EndRequestBody = '!LB3x' -FCGI_UnknownTypeBody = '!B7x' - -FCGI_EndRequestBody_LEN = struct.calcsize(FCGI_EndRequestBody) -FCGI_UnknownTypeBody_LEN = struct.calcsize(FCGI_UnknownTypeBody) - -if __debug__: - import time - - # Set non-zero to write debug output to a file. 
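FCGI_Header above ('!BBHHBx') is the fixed 8-byte record header that Record.read()/write() further down unpack and pack with struct. A short Python 3 illustration, not part of the removed file:

```python
import struct

FCGI_Header = '!BBHHBx'  # version, type, requestId, contentLength, paddingLength, reserved
FCGI_HEADER_LEN = struct.calcsize(FCGI_Header)  # == 8

# e.g. an FCGI_GET_VALUES record (type 9) on the null request id,
# carrying a 16-byte body with no padding:
header = struct.pack(FCGI_Header, 1, 9, 0, 16, 0)
assert len(header) == FCGI_HEADER_LEN == 8
assert struct.unpack(FCGI_Header, header) == (1, 9, 0, 16, 0)
```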
- DEBUG = 0 - DEBUGLOG = '/tmp/fcgi.log' - - def _debug(level, msg): - if DEBUG < level: - return - - try: - f = open(DEBUGLOG, 'a') - f.write('%sfcgi: %s\n' % (time.ctime()[4:-4], msg)) - f.close() - except: - pass - -class InputStream(object): - """ - File-like object representing FastCGI input streams (FCGI_STDIN and - FCGI_DATA). Supports the minimum methods required by WSGI spec. - """ - def __init__(self, conn): - self._conn = conn - - # See Server. - self._shrinkThreshold = conn.server.inputStreamShrinkThreshold - - self._buf = '' - self._bufList = [] - self._pos = 0 # Current read position. - self._avail = 0 # Number of bytes currently available. - - self._eof = False # True when server has sent EOF notification. - - def _shrinkBuffer(self): - """Gets rid of already read data (since we can't rewind).""" - if self._pos >= self._shrinkThreshold: - self._buf = self._buf[self._pos:] - self._avail -= self._pos - self._pos = 0 - - assert self._avail >= 0 - - def _waitForData(self): - """Waits for more data to become available.""" - self._conn.process_input() - - def read(self, n=-1): - if self._pos == self._avail and self._eof: - return '' - while True: - if n < 0 or (self._avail - self._pos) < n: - # Not enough data available. - if self._eof: - # And there's no more coming. - newPos = self._avail - break - else: - # Wait for more data. - self._waitForData() - continue - else: - newPos = self._pos + n - break - # Merge buffer list, if necessary. - if self._bufList: - self._buf += ''.join(self._bufList) - self._bufList = [] - r = self._buf[self._pos:newPos] - self._pos = newPos - self._shrinkBuffer() - return r - - def readline(self, length=None): - if self._pos == self._avail and self._eof: - return '' - while True: - # Unfortunately, we need to merge the buffer list early. - if self._bufList: - self._buf += ''.join(self._bufList) - self._bufList = [] - # Find newline. - i = self._buf.find('\n', self._pos) - if i < 0: - # Not found? - if self._eof: - # No more data coming. - newPos = self._avail - break - else: - if length is not None and len(self._buf) >= length + self._pos: - newPos = self._pos + length - break - # Wait for more to come. - self._waitForData() - continue - else: - newPos = i + 1 - break - r = self._buf[self._pos:newPos] - self._pos = newPos - self._shrinkBuffer() - return r - - def readlines(self, sizehint=0): - total = 0 - lines = [] - line = self.readline() - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline() - return lines - - def __iter__(self): - return self - - def next(self): - r = self.readline() - if not r: - raise StopIteration - return r - - def add_data(self, data): - if not data: - self._eof = True - else: - self._bufList.append(data) - self._avail += len(data) - -class MultiplexedInputStream(InputStream): - """ - A version of InputStream meant to be used with MultiplexedConnections. - Assumes the MultiplexedConnection (the producer) and the Request - (the consumer) are running in different threads. - """ - def __init__(self, conn): - super(MultiplexedInputStream, self).__init__(conn) - - # Arbitrates access to this InputStream (it's used simultaneously - # by a Request and its owning Connection object). - lock = threading.RLock() - - # Notifies Request thread that there is new data available. - self._lock = threading.Condition(lock) - - def _waitForData(self): - # Wait for notification from add_data(). 
- self._lock.wait() - - def read(self, n=-1): - self._lock.acquire() - try: - return super(MultiplexedInputStream, self).read(n) - finally: - self._lock.release() - - def readline(self, length=None): - self._lock.acquire() - try: - return super(MultiplexedInputStream, self).readline(length) - finally: - self._lock.release() - - def add_data(self, data): - self._lock.acquire() - try: - super(MultiplexedInputStream, self).add_data(data) - self._lock.notify() - finally: - self._lock.release() - -class OutputStream(object): - """ - FastCGI output stream (FCGI_STDOUT/FCGI_STDERR). By default, calls to - write() or writelines() immediately result in Records being sent back - to the server. Buffering should be done in a higher level! - """ - def __init__(self, conn, req, type, buffered=False): - self._conn = conn - self._req = req - self._type = type - self._buffered = buffered - self._bufList = [] # Used if buffered is True - self.dataWritten = False - self.closed = False - - def _write(self, data): - length = len(data) - while length: - toWrite = min(length, self._req.server.maxwrite - FCGI_HEADER_LEN) - - rec = Record(self._type, self._req.requestId) - rec.contentLength = toWrite - rec.contentData = data[:toWrite] - self._conn.writeRecord(rec) - - data = data[toWrite:] - length -= toWrite - - def write(self, data): - assert not self.closed - - if not data: - return - - self.dataWritten = True - - if self._buffered: - self._bufList.append(data) - else: - self._write(data) - - def writelines(self, lines): - assert not self.closed - - for line in lines: - self.write(line) - - def flush(self): - # Only need to flush if this OutputStream is actually buffered. - if self._buffered: - data = ''.join(self._bufList) - self._bufList = [] - self._write(data) - - # Though available, the following should NOT be called by WSGI apps. - def close(self): - """Sends end-of-stream notification, if necessary.""" - if not self.closed and self.dataWritten: - self.flush() - rec = Record(self._type, self._req.requestId) - self._conn.writeRecord(rec) - self.closed = True - -class TeeOutputStream(object): - """ - Simple wrapper around two or more output file-like objects that copies - written data to all streams. - """ - def __init__(self, streamList): - self._streamList = streamList - - def write(self, data): - for f in self._streamList: - f.write(data) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def flush(self): - for f in self._streamList: - f.flush() - -class StdoutWrapper(object): - """ - Wrapper for sys.stdout so we know if data has actually been written. - """ - def __init__(self, stdout): - self._file = stdout - self.dataWritten = False - - def write(self, data): - if data: - self.dataWritten = True - self._file.write(data) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def __getattr__(self, name): - return getattr(self._file, name) - -def decode_pair(s, pos=0): - """ - Decodes a name/value pair. - - The number of bytes decoded as well as the name/value pair - are returned. 
- """ - nameLength = ord(s[pos]) - if nameLength & 128: - nameLength = struct.unpack('!L', s[pos:pos+4])[0] & 0x7fffffff - pos += 4 - else: - pos += 1 - - valueLength = ord(s[pos]) - if valueLength & 128: - valueLength = struct.unpack('!L', s[pos:pos+4])[0] & 0x7fffffff - pos += 4 - else: - pos += 1 - - name = s[pos:pos+nameLength] - pos += nameLength - value = s[pos:pos+valueLength] - pos += valueLength - - return (pos, (name, value)) - -def encode_pair(name, value): - """ - Encodes a name/value pair. - - The encoded string is returned. - """ - nameLength = len(name) - if nameLength < 128: - s = chr(nameLength) - else: - s = struct.pack('!L', nameLength | 0x80000000L) - - valueLength = len(value) - if valueLength < 128: - s += chr(valueLength) - else: - s += struct.pack('!L', valueLength | 0x80000000L) - - return s + name + value - -class Record(object): - """ - A FastCGI Record. - - Used for encoding/decoding records. - """ - def __init__(self, type=FCGI_UNKNOWN_TYPE, requestId=FCGI_NULL_REQUEST_ID): - self.version = FCGI_VERSION_1 - self.type = type - self.requestId = requestId - self.contentLength = 0 - self.paddingLength = 0 - self.contentData = '' - - def _recvall(sock, length): - """ - Attempts to receive length bytes from a socket, blocking if necessary. - (Socket may be blocking or non-blocking.) - """ - dataList = [] - recvLen = 0 - while length: - try: - data = sock.recv(length) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([sock], [], []) - continue - else: - raise - if not data: # EOF - break - dataList.append(data) - dataLen = len(data) - recvLen += dataLen - length -= dataLen - return ''.join(dataList), recvLen - _recvall = staticmethod(_recvall) - - def read(self, sock): - """Read and decode a Record from a socket.""" - try: - header, length = self._recvall(sock, FCGI_HEADER_LEN) - except: - raise EOFError - - if length < FCGI_HEADER_LEN: - raise EOFError - - self.version, self.type, self.requestId, self.contentLength, \ - self.paddingLength = struct.unpack(FCGI_Header, header) - - if __debug__: _debug(9, 'read: fd = %d, type = %d, requestId = %d, ' - 'contentLength = %d' % - (sock.fileno(), self.type, self.requestId, - self.contentLength)) - - if self.contentLength: - try: - self.contentData, length = self._recvall(sock, - self.contentLength) - except: - raise EOFError - - if length < self.contentLength: - raise EOFError - - if self.paddingLength: - try: - self._recvall(sock, self.paddingLength) - except: - raise EOFError - - def _sendall(sock, data): - """ - Writes data to a socket and does not return until all the data is sent. - """ - length = len(data) - while length: - try: - sent = sock.send(data) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([], [sock], []) - continue - else: - raise - data = data[sent:] - length -= sent - _sendall = staticmethod(_sendall) - - def write(self, sock): - """Encode and write a Record to a socket.""" - self.paddingLength = -self.contentLength & 7 - - if __debug__: _debug(9, 'write: fd = %d, type = %d, requestId = %d, ' - 'contentLength = %d' % - (sock.fileno(), self.type, self.requestId, - self.contentLength)) - - header = struct.pack(FCGI_Header, self.version, self.type, - self.requestId, self.contentLength, - self.paddingLength) - self._sendall(sock, header) - if self.contentLength: - self._sendall(sock, self.contentData) - if self.paddingLength: - self._sendall(sock, '\x00'*self.paddingLength) - -class Request(object): - """ - Represents a single FastCGI request. 
- - These objects are passed to your handler and is the main interface - between your handler and the fcgi module. The methods should not - be called by your handler. However, server, params, stdin, stdout, - stderr, and data are free for your handler's use. - """ - def __init__(self, conn, inputStreamClass): - self._conn = conn - - self.server = conn.server - self.params = {} - self.stdin = inputStreamClass(conn) - self.stdout = OutputStream(conn, self, FCGI_STDOUT) - self.stderr = OutputStream(conn, self, FCGI_STDERR, buffered=True) - self.data = inputStreamClass(conn) - - def run(self): - """Runs the handler, flushes the streams, and ends the request.""" - try: - protocolStatus, appStatus = self.server.handler(self) - except: - traceback.print_exc(file=self.stderr) - self.stderr.flush() - if not self.stdout.dataWritten: - self.server.error(self) - - protocolStatus, appStatus = FCGI_REQUEST_COMPLETE, 0 - - if __debug__: _debug(1, 'protocolStatus = %d, appStatus = %d' % - (protocolStatus, appStatus)) - - try: - self._flush() - self._end(appStatus, protocolStatus) - except socket.error, e: - if e[0] != errno.EPIPE: - raise - - def _end(self, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE): - self._conn.end_request(self, appStatus, protocolStatus) - - def _flush(self): - self.stdout.close() - self.stderr.close() - -class CGIRequest(Request): - """A normal CGI request disguised as a FastCGI request.""" - def __init__(self, server): - # These are normally filled in by Connection. - self.requestId = 1 - self.role = FCGI_RESPONDER - self.flags = 0 - self.aborted = False - - self.server = server - self.params = dict(os.environ) - self.stdin = sys.stdin - self.stdout = StdoutWrapper(sys.stdout) # Oh, the humanity! - self.stderr = sys.stderr - self.data = StringIO.StringIO() - - def _end(self, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE): - sys.exit(appStatus) - - def _flush(self): - # Not buffered, do nothing. - pass - -class Connection(object): - """ - A Connection with the web server. - - Each Connection is associated with a single socket (which is - connected to the web server) and is responsible for handling all - the FastCGI message processing for that socket. - """ - _multiplexed = False - _inputStreamClass = InputStream - - def __init__(self, sock, addr, server): - self._sock = sock - self._addr = addr - self.server = server - - # Active Requests for this Connection, mapped by request ID. - self._requests = {} - - def _cleanupSocket(self): - """Close the Connection's socket.""" - try: - self._sock.shutdown(socket.SHUT_WR) - except: - return - try: - while True: - r, w, e = select.select([self._sock], [], []) - if not r or not self._sock.recv(1024): - break - except: - pass - self._sock.close() - - def run(self): - """Begin processing data from the socket.""" - self._keepGoing = True - while self._keepGoing: - try: - self.process_input() - except (EOFError, KeyboardInterrupt): - break - except (select.error, socket.error), e: - if e[0] == errno.EBADF: # Socket was closed by Request. - break - raise - - self._cleanupSocket() - - def process_input(self): - """Attempt to read a single Record from the socket and process it.""" - # Currently, any children Request threads notify this Connection - # that it is no longer needed by closing the Connection's socket. - # We need to put a timeout on select, otherwise we might get - # stuck in it indefinitely... (I don't like this solution.) 
- while self._keepGoing: - try: - r, w, e = select.select([self._sock], [], [], 1.0) - except ValueError: - # Sigh. ValueError gets thrown sometimes when passing select - # a closed socket. - raise EOFError - if r: break - if not self._keepGoing: - return - rec = Record() - rec.read(self._sock) - - if rec.type == FCGI_GET_VALUES: - self._do_get_values(rec) - elif rec.type == FCGI_BEGIN_REQUEST: - self._do_begin_request(rec) - elif rec.type == FCGI_ABORT_REQUEST: - self._do_abort_request(rec) - elif rec.type == FCGI_PARAMS: - self._do_params(rec) - elif rec.type == FCGI_STDIN: - self._do_stdin(rec) - elif rec.type == FCGI_DATA: - self._do_data(rec) - elif rec.requestId == FCGI_NULL_REQUEST_ID: - self._do_unknown_type(rec) - else: - # Need to complain about this. - pass - - def writeRecord(self, rec): - """ - Write a Record to the socket. - """ - rec.write(self._sock) - - def end_request(self, req, appStatus=0L, - protocolStatus=FCGI_REQUEST_COMPLETE, remove=True): - """ - End a Request. - - Called by Request objects. An FCGI_END_REQUEST Record is - sent to the web server. If the web server no longer requires - the connection, the socket is closed, thereby ending this - Connection (run() returns). - """ - rec = Record(FCGI_END_REQUEST, req.requestId) - rec.contentData = struct.pack(FCGI_EndRequestBody, appStatus, - protocolStatus) - rec.contentLength = FCGI_EndRequestBody_LEN - self.writeRecord(rec) - - if remove: - del self._requests[req.requestId] - - if __debug__: _debug(2, 'end_request: flags = %d' % req.flags) - - if not (req.flags & FCGI_KEEP_CONN) and not self._requests: - self._cleanupSocket() - self._keepGoing = False - - def _do_get_values(self, inrec): - """Handle an FCGI_GET_VALUES request from the web server.""" - outrec = Record(FCGI_GET_VALUES_RESULT) - - pos = 0 - while pos < inrec.contentLength: - pos, (name, value) = decode_pair(inrec.contentData, pos) - cap = self.server.capability.get(name) - if cap is not None: - outrec.contentData += encode_pair(name, str(cap)) - - outrec.contentLength = len(outrec.contentData) - self.writeRecord(outrec) - - def _do_begin_request(self, inrec): - """Handle an FCGI_BEGIN_REQUEST from the web server.""" - role, flags = struct.unpack(FCGI_BeginRequestBody, inrec.contentData) - - req = self.server.request_class(self, self._inputStreamClass) - req.requestId, req.role, req.flags = inrec.requestId, role, flags - req.aborted = False - - if not self._multiplexed and self._requests: - # Can't multiplex requests. - self.end_request(req, 0L, FCGI_CANT_MPX_CONN, remove=False) - else: - self._requests[inrec.requestId] = req - - def _do_abort_request(self, inrec): - """ - Handle an FCGI_ABORT_REQUEST from the web server. - - We just mark a flag in the associated Request. - """ - req = self._requests.get(inrec.requestId) - if req is not None: - req.aborted = True - - def _start_request(self, req): - """Run the request.""" - # Not multiplexed, so run it inline. - req.run() - - def _do_params(self, inrec): - """ - Handle an FCGI_PARAMS Record. - - If the last FCGI_PARAMS Record is received, start the request. 
- """ - req = self._requests.get(inrec.requestId) - if req is not None: - if inrec.contentLength: - pos = 0 - while pos < inrec.contentLength: - pos, (name, value) = decode_pair(inrec.contentData, pos) - req.params[name] = value - else: - self._start_request(req) - - def _do_stdin(self, inrec): - """Handle the FCGI_STDIN stream.""" - req = self._requests.get(inrec.requestId) - if req is not None: - req.stdin.add_data(inrec.contentData) - - def _do_data(self, inrec): - """Handle the FCGI_DATA stream.""" - req = self._requests.get(inrec.requestId) - if req is not None: - req.data.add_data(inrec.contentData) - - def _do_unknown_type(self, inrec): - """Handle an unknown request type. Respond accordingly.""" - outrec = Record(FCGI_UNKNOWN_TYPE) - outrec.contentData = struct.pack(FCGI_UnknownTypeBody, inrec.type) - outrec.contentLength = FCGI_UnknownTypeBody_LEN - self.writeRecord(rec) - -class MultiplexedConnection(Connection): - """ - A version of Connection capable of handling multiple requests - simultaneously. - """ - _multiplexed = True - _inputStreamClass = MultiplexedInputStream - - def __init__(self, sock, addr, server): - super(MultiplexedConnection, self).__init__(sock, addr, server) - - # Used to arbitrate access to self._requests. - lock = threading.RLock() - - # Notification is posted everytime a request completes, allowing us - # to quit cleanly. - self._lock = threading.Condition(lock) - - def _cleanupSocket(self): - # Wait for any outstanding requests before closing the socket. - self._lock.acquire() - while self._requests: - self._lock.wait() - self._lock.release() - - super(MultiplexedConnection, self)._cleanupSocket() - - def writeRecord(self, rec): - # Must use locking to prevent intermingling of Records from different - # threads. - self._lock.acquire() - try: - # Probably faster than calling super. ;) - rec.write(self._sock) - finally: - self._lock.release() - - def end_request(self, req, appStatus=0L, - protocolStatus=FCGI_REQUEST_COMPLETE, remove=True): - self._lock.acquire() - try: - super(MultiplexedConnection, self).end_request(req, appStatus, - protocolStatus, - remove) - self._lock.notify() - finally: - self._lock.release() - - def _do_begin_request(self, inrec): - self._lock.acquire() - try: - super(MultiplexedConnection, self)._do_begin_request(inrec) - finally: - self._lock.release() - - def _do_abort_request(self, inrec): - self._lock.acquire() - try: - super(MultiplexedConnection, self)._do_abort_request(inrec) - finally: - self._lock.release() - - def _start_request(self, req): - thread.start_new_thread(req.run, ()) - - def _do_params(self, inrec): - self._lock.acquire() - try: - super(MultiplexedConnection, self)._do_params(inrec) - finally: - self._lock.release() - - def _do_stdin(self, inrec): - self._lock.acquire() - try: - super(MultiplexedConnection, self)._do_stdin(inrec) - finally: - self._lock.release() - - def _do_data(self, inrec): - self._lock.acquire() - try: - super(MultiplexedConnection, self)._do_data(inrec) - finally: - self._lock.release() - -class BaseFCGIServer(object): - request_class = Request - cgirequest_class = CGIRequest - - # The maximum number of bytes (per Record) to write to the server. - # I've noticed mod_fastcgi has a relatively small receive buffer (8K or - # so). - maxwrite = 8192 - - # Limits the size of the InputStream's string buffer to this size + the - # server's maximum Record size. Since the InputStream is not seekable, - # we throw away already-read data once this certain amount has been read. 
- inputStreamShrinkThreshold = 102400 - 8192 - - def __init__(self, application, environ=None, - multithreaded=True, multiprocess=False, - bindAddress=None, umask=None, multiplexed=False, - debug=True, roles=(FCGI_RESPONDER,), - forceCGI=False): - """ - bindAddress, if present, must either be a string or a 2-tuple. If - present, run() will open its own listening socket. You would use - this if you wanted to run your application as an 'external' FastCGI - app. (i.e. the webserver would no longer be responsible for starting - your app) If a string, it will be interpreted as a filename and a UNIX - socket will be opened. If a tuple, the first element, a string, - is the interface name/IP to bind to, and the second element (an int) - is the port number. - - If binding to a UNIX socket, umask may be set to specify what - the umask is to be changed to before the socket is created in the - filesystem. After the socket is created, the previous umask is - restored. - - Set multiplexed to True if you want to handle multiple requests - per connection. Some FastCGI backends (namely mod_fastcgi) don't - multiplex requests at all, so by default this is off (which saves - on thread creation/locking overhead). If threads aren't available, - this keyword is ignored; it's not possible to multiplex requests - at all. - """ - if environ is None: - environ = {} - - self.application = application - self.environ = environ - self.multithreaded = multithreaded - self.multiprocess = multiprocess - self.debug = debug - self.roles = roles - self.forceCGI = forceCGI - - self._bindAddress = bindAddress - self._umask = umask - - # Used to force single-threadedness - self._appLock = thread.allocate_lock() - - if thread_available: - try: - import resource - # Attempt to glean the maximum number of connections - # from the OS. - maxConns = resource.getrlimit(resource.RLIMIT_NOFILE)[0] - except ImportError: - maxConns = 100 # Just some made up number. - maxReqs = maxConns - if multiplexed: - self._connectionClass = MultiplexedConnection - maxReqs *= 5 # Another made up number. - else: - self._connectionClass = Connection - self.capability = { - FCGI_MAX_CONNS: maxConns, - FCGI_MAX_REQS: maxReqs, - FCGI_MPXS_CONNS: multiplexed and 1 or 0 - } - else: - self._connectionClass = Connection - self.capability = { - # If threads aren't available, these are pretty much correct. - FCGI_MAX_CONNS: 1, - FCGI_MAX_REQS: 1, - FCGI_MPXS_CONNS: 0 - } - - def _setupSocket(self): - if self._bindAddress is None: # Run as a normal FastCGI? - isFCGI = True - - sock = socket.fromfd(FCGI_LISTENSOCK_FILENO, socket.AF_INET, - socket.SOCK_STREAM) - try: - sock.getpeername() - except socket.error, e: - if e[0] == errno.ENOTSOCK: - # Not a socket, assume CGI context. - isFCGI = False - elif e[0] != errno.ENOTCONN: - raise - - # FastCGI/CGI discrimination is broken on Mac OS X. - # Set the environment variable FCGI_FORCE_CGI to "Y" or "y" - # if you want to run your app as a simple CGI. (You can do - # this with Apache's mod_env [not loaded by default in OS X - # client, ha ha] and the SetEnv directive.) 
- if not isFCGI or self.forceCGI or \ - os.environ.get('FCGI_FORCE_CGI', 'N').upper().startswith('Y'): - req = self.cgirequest_class(self) - req.run() - sys.exit(0) - else: - # Run as a server - oldUmask = None - if type(self._bindAddress) is str: - # Unix socket - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - try: - os.unlink(self._bindAddress) - except OSError: - pass - if self._umask is not None: - oldUmask = os.umask(self._umask) - else: - # INET socket - assert type(self._bindAddress) is tuple - assert len(self._bindAddress) == 2 - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - - sock.bind(self._bindAddress) - sock.listen(socket.SOMAXCONN) - - if oldUmask is not None: - os.umask(oldUmask) - - return sock - - def _cleanupSocket(self, sock): - """Closes the main socket.""" - sock.close() - - def handler(self, req): - """Special handler for WSGI.""" - if req.role not in self.roles: - return FCGI_UNKNOWN_ROLE, 0 - - # Mostly taken from example CGI gateway. - environ = req.params - environ.update(self.environ) - - environ['wsgi.version'] = (1,0) - environ['wsgi.input'] = req.stdin - if self._bindAddress is None: - stderr = req.stderr - else: - stderr = TeeOutputStream((sys.stderr, req.stderr)) - environ['wsgi.errors'] = stderr - environ['wsgi.multithread'] = not isinstance(req, CGIRequest) and \ - thread_available and self.multithreaded - environ['wsgi.multiprocess'] = isinstance(req, CGIRequest) or \ - self.multiprocess - environ['wsgi.run_once'] = isinstance(req, CGIRequest) - - if environ.get('HTTPS', 'off') in ('on', '1'): - environ['wsgi.url_scheme'] = 'https' - else: - environ['wsgi.url_scheme'] = 'http' - - self._sanitizeEnv(environ) - - headers_set = [] - headers_sent = [] - result = None - - def write(data): - assert type(data) is str, 'write() argument must be string' - assert headers_set, 'write() before start_response()' - - if not headers_sent: - status, responseHeaders = headers_sent[:] = headers_set - found = False - for header,value in responseHeaders: - if header.lower() == 'content-length': - found = True - break - if not found and result is not None: - try: - if len(result) == 1: - responseHeaders.append(('Content-Length', - str(len(data)))) - except: - pass - s = 'Status: %s\r\n' % status - for header in responseHeaders: - s += '%s: %s\r\n' % header - s += '\r\n' - req.stdout.write(s) - - req.stdout.write(data) - req.stdout.flush() - - def start_response(status, response_headers, exc_info=None): - if exc_info: - try: - if headers_sent: - # Re-raise if too late - raise exc_info[0], exc_info[1], exc_info[2] - finally: - exc_info = None # avoid dangling circular ref - else: - assert not headers_set, 'Headers already set!' 
- - assert type(status) is str, 'Status must be a string' - assert len(status) >= 4, 'Status must be at least 4 characters' - assert int(status[:3]), 'Status must begin with 3-digit code' - assert status[3] == ' ', 'Status must have a space after code' - assert type(response_headers) is list, 'Headers must be a list' - if __debug__: - for name,val in response_headers: - assert type(name) is str, 'Header name "%s" must be a string' % name - assert type(val) is str, 'Value of header "%s" must be a string' % name - - headers_set[:] = [status, response_headers] - return write - - if not self.multithreaded: - self._appLock.acquire() - try: - try: - result = self.application(environ, start_response) - try: - for data in result: - if data: - write(data) - if not headers_sent: - write('') # in case body was empty - finally: - if hasattr(result, 'close'): - result.close() - except socket.error, e: - if e[0] != errno.EPIPE: - raise # Don't let EPIPE propagate beyond server - finally: - if not self.multithreaded: - self._appLock.release() - - return FCGI_REQUEST_COMPLETE, 0 - - def _sanitizeEnv(self, environ): - """Ensure certain values are present, if required by WSGI.""" - if not environ.has_key('SCRIPT_NAME'): - environ['SCRIPT_NAME'] = '' - - reqUri = None - if environ.has_key('REQUEST_URI'): - reqUri = environ['REQUEST_URI'].split('?', 1) - - if not environ.has_key('PATH_INFO') or not environ['PATH_INFO']: - if reqUri is not None: - environ['PATH_INFO'] = reqUri[0] - else: - environ['PATH_INFO'] = '' - if not environ.has_key('QUERY_STRING') or not environ['QUERY_STRING']: - if reqUri is not None and len(reqUri) > 1: - environ['QUERY_STRING'] = reqUri[1] - else: - environ['QUERY_STRING'] = '' - - # If any of these are missing, it probably signifies a broken - # server... - for name,default in [('REQUEST_METHOD', 'GET'), - ('SERVER_NAME', 'localhost'), - ('SERVER_PORT', '80'), - ('SERVER_PROTOCOL', 'HTTP/1.0')]: - if not environ.has_key(name): - environ['wsgi.errors'].write('%s: missing FastCGI param %s ' - 'required by WSGI!\n' % - (self.__class__.__name__, name)) - environ[name] = default - - def error(self, req): - """ - Called by Request if an exception occurs within the handler. May and - should be overridden. - """ - if self.debug: - import cgitb - req.stdout.write('Content-Type: text/html\r\n\r\n' + - cgitb.html(sys.exc_info())) - else: - errorpage = """ - -Unhandled Exception - -

-<h1>Unhandled Exception</h1>
-<p>An unhandled exception was thrown by the application.</p>
-</body></html>
- -""" - req.stdout.write('Content-Type: text/html\r\n\r\n' + - errorpage) diff --git a/lib/nulib/python/nulib/ext/flup/server/fcgi_fork.py b/lib/nulib/python/nulib/ext/flup/server/fcgi_fork.py deleted file mode 100644 index d79b777..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/fcgi_fork.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -fcgi - a FastCGI/WSGI gateway. - -For more information about FastCGI, see . - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - from myapplication import app # Assume app is your WSGI application object - from fcgi import WSGIServer - WSGIServer(app).run() - -See the documentation for WSGIServer for more information. - -On most platforms, fcgi will fallback to regular CGI behavior if run in a -non-FastCGI context. If you want to force CGI behavior, set the environment -variable FCGI_FORCE_CGI to "Y" or "y". -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import os - -from flup.server.fcgi_base import BaseFCGIServer, FCGI_RESPONDER, \ - FCGI_MAX_CONNS, FCGI_MAX_REQS, FCGI_MPXS_CONNS -from flup.server.preforkserver import PreforkServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseFCGIServer, PreforkServer): - """ - FastCGI server that supports the Web Server Gateway Interface. See - . - """ - def __init__(self, application, environ=None, - bindAddress=None, umask=None, multiplexed=False, - debug=True, roles=(FCGI_RESPONDER,), forceCGI=False, **kw): - """ - environ, if present, must be a dictionary-like object. Its - contents will be copied into application's environ. Useful - for passing application-specific variables. - - bindAddress, if present, must either be a string or a 2-tuple. If - present, run() will open its own listening socket. You would use - this if you wanted to run your application as an 'external' FastCGI - app. (i.e. the webserver would no longer be responsible for starting - your app) If a string, it will be interpreted as a filename and a UNIX - socket will be opened. If a tuple, the first element, a string, - is the interface name/IP to bind to, and the second element (an int) - is the port number. 
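# Illustrative usage sketch (not part of the original source; the application
# and the address below are placeholders): running the forking server as an
# 'external' FastCGI app on a TCP port. Passing a string instead of a tuple
# would bind a UNIX socket at that path.
from flup.server.fcgi_fork import WSGIServer

def demo_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['Hello from an external FastCGI app\n']

if __name__ == '__main__':
    WSGIServer(demo_app, bindAddress=('127.0.0.1', 9000)).run()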
- """ - BaseFCGIServer.__init__(self, application, - environ=environ, - multithreaded=False, - multiprocess=True, - bindAddress=bindAddress, - umask=umask, - multiplexed=multiplexed, - debug=debug, - roles=roles, - forceCGI=forceCGI) - for key in ('multithreaded', 'multiprocess', 'jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - PreforkServer.__init__(self, jobClass=self._connectionClass, - jobArgs=(self,), **kw) - - try: - import resource - # Attempt to glean the maximum number of connections - # from the OS. - try: - maxProcs = resource.getrlimit(resource.RLIMIT_NPROC)[0] - maxConns = resource.getrlimit(resource.RLIMIT_NOFILE)[0] - maxConns = min(maxConns, maxProcs) - except AttributeError: - maxConns = resource.getrlimit(resource.RLIMIT_NOFILE)[0] - except ImportError: - maxConns = 100 # Just some made up number. - maxReqs = maxConns - self.capability = { - FCGI_MAX_CONNS: maxConns, - FCGI_MAX_REQS: maxReqs, - FCGI_MPXS_CONNS: 0 - } - - def _isClientAllowed(self, addr): - return self._web_server_addrs is None or \ - (len(addr) == 2 and addr[0] in self._web_server_addrs) - - def run(self): - """ - The main loop. Exits on SIGHUP, SIGINT, SIGTERM. Returns True if - SIGHUP was received, False otherwise. - """ - self._web_server_addrs = os.environ.get('FCGI_WEB_SERVER_ADDRS') - if self._web_server_addrs is not None: - self._web_server_addrs = map(lambda x: x.strip(), - self._web_server_addrs.split(',')) - - sock = self._setupSocket() - - ret = PreforkServer.run(self, sock) - - self._cleanupSocket(sock) - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \
-              '<table border="1">'
-        names = environ.keys()
-        names.sort()
-        for name in names:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                name, cgi.escape(`environ[name]`))
-
-        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ,
-                                keep_blank_values=1)
-        if form.list:
-            yield '<tr><th colspan="2">Form data</th></tr>'
-
-        for field in form.list:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                field.name, field.value)
-
-        yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - WSGIServer(test_app).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/fcgi_single.py b/lib/nulib/python/nulib/ext/flup/server/fcgi_single.py deleted file mode 100644 index e62b08f..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/fcgi_single.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -fcgi - a FastCGI/WSGI gateway. - -For more information about FastCGI, see . - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - from myapplication import app # Assume app is your WSGI application object - from fcgi import WSGIServer - WSGIServer(app).run() - -See the documentation for WSGIServer for more information. - -On most platforms, fcgi will fallback to regular CGI behavior if run in a -non-FastCGI context. If you want to force CGI behavior, set the environment -variable FCGI_FORCE_CGI to "Y" or "y". -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import os - -from flup.server.fcgi_base import BaseFCGIServer, FCGI_RESPONDER, \ - FCGI_MAX_CONNS, FCGI_MAX_REQS, FCGI_MPXS_CONNS -from flup.server.singleserver import SingleServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseFCGIServer, SingleServer): - """ - FastCGI server that supports the Web Server Gateway Interface. See - . - """ - def __init__(self, application, environ=None, - bindAddress=None, umask=None, multiplexed=False, - debug=True, roles=(FCGI_RESPONDER,), forceCGI=False, **kw): - """ - environ, if present, must be a dictionary-like object. Its - contents will be copied into application's environ. Useful - for passing application-specific variables. - - bindAddress, if present, must either be a string or a 2-tuple. If - present, run() will open its own listening socket. You would use - this if you wanted to run your application as an 'external' FastCGI - app. (i.e. the webserver would no longer be responsible for starting - your app) If a string, it will be interpreted as a filename and a UNIX - socket will be opened. 
If a tuple, the first element, a string, - is the interface name/IP to bind to, and the second element (an int) - is the port number. - """ - BaseFCGIServer.__init__(self, application, - environ=environ, - multithreaded=False, - multiprocess=False, - bindAddress=bindAddress, - umask=umask, - multiplexed=multiplexed, - debug=debug, - roles=roles, - forceCGI=forceCGI) - for key in ('jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - SingleServer.__init__(self, jobClass=self._connectionClass, - jobArgs=(self,), **kw) - self.capability = { - FCGI_MAX_CONNS: 1, - FCGI_MAX_REQS: 1, - FCGI_MPXS_CONNS: 0 - } - - def _isClientAllowed(self, addr): - return self._web_server_addrs is None or \ - (len(addr) == 2 and addr[0] in self._web_server_addrs) - - def run(self): - """ - The main loop. Exits on SIGHUP, SIGINT, SIGTERM. Returns True if - SIGHUP was received, False otherwise. - """ - self._web_server_addrs = os.environ.get('FCGI_WEB_SERVER_ADDRS') - if self._web_server_addrs is not None: - self._web_server_addrs = map(lambda x: x.strip(), - self._web_server_addrs.split(',')) - - sock = self._setupSocket() - - ret = SingleServer.run(self, sock) - - self._cleanupSocket(sock) - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \
-              '<table border="1">'
-        names = environ.keys()
-        names.sort()
-        for name in names:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                name, cgi.escape(`environ[name]`))
-
-        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ,
-                                keep_blank_values=1)
-        if form.list:
-            yield '<tr><th colspan="2">Form data</th></tr>'
-
-        for field in form.list:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                field.name, field.value)
-
-        yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - WSGIServer(test_app).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/paste_factory.py b/lib/nulib/python/nulib/ext/flup/server/paste_factory.py deleted file mode 100644 index 1bcc867..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/paste_factory.py +++ /dev/null @@ -1,121 +0,0 @@ -# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) -# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php -def asbool(obj): - if isinstance(obj, (str, unicode)): - obj = obj.strip().lower() - if obj in ['true', 'yes', 'on', 'y', 't', '1']: - return True - elif obj in ['false', 'no', 'off', 'n', 'f', '0']: - return False - else: - raise ValueError( - "String is not true/false: %r" % obj) - return bool(obj) - -def aslist(obj, sep=None, strip=True): - if isinstance(obj, (str, unicode)): - lst = obj.split(sep) - if strip: - lst = [v.strip() for v in lst] - return lst - elif isinstance(obj, (list, tuple)): - return obj - elif obj is None: - return [] - else: - return [obj] - -def run_ajp_thread(wsgi_app, global_conf, - scriptName='', host='localhost', port='8009', - allowedServers='127.0.0.1'): - import flup.server.ajp - addr = (host, int(port)) - s = flup.server.ajp.WSGIServer( - wsgi_app, - scriptName=scriptName, - bindAddress=addr, - allowedServers=aslist(allowedServers), - ) - s.run() - -def run_ajp_fork(wsgi_app, global_conf, - scriptName='', host='localhost', port='8009', - allowedServers='127.0.0.1'): - import flup.server.ajp_fork - addr = (host, int(port)) - s = flup.server.ajp_fork.WSGIServer( - wsgi_app, - scriptName=scriptName, - bindAddress=addr, - allowedServers=aslist(allowedServers), - ) - s.run() - -def run_fcgi_thread(wsgi_app, global_conf, - host=None, port=None, - socket=None, umask=None, - multiplexed=False): - import flup.server.fcgi - if socket: - assert host is None and port is None - sock = socket - elif host: - assert host is not None and port is not None - sock = (host, int(port)) - else: - sock = None - if umask is not None: - umask = int(umask) - s = flup.server.fcgi.WSGIServer( - wsgi_app, - bindAddress=sock, umask=umask, - multiplexed=asbool(multiplexed)) - s.run() - -def run_fcgi_fork(wsgi_app, global_conf, - host=None, port=None, - socket=None, umask=None, - multiplexed=False): - import flup.server.fcgi_fork - if socket: - assert host is None and port is None - sock = socket - elif host: - assert host is not None and port is not None - sock = (host, int(port)) - else: - sock = None - if umask is not None: - umask = int(umask) - s = flup.server.fcgi_fork.WSGIServer( - wsgi_app, - bindAddress=sock, umask=umask, - multiplexed=asbool(multiplexed)) - s.run() - -def run_scgi_thread(wsgi_app, global_conf, - scriptName='', host='localhost', port='4000', - allowedServers='127.0.0.1'): - import flup.server.scgi - addr = (host, int(port)) - s = flup.server.scgi.WSGIServer( - wsgi_app, - scriptName=scriptName, - bindAddress=addr, - allowedServers=aslist(allowedServers), - ) - s.run() - -def run_scgi_fork(wsgi_app, global_conf, - scriptName='', host='localhost', port='4000', - allowedServers='127.0.0.1'): - import flup.server.scgi_fork - addr = (host, int(port)) - s = flup.server.scgi_fork.WSGIServer( - wsgi_app, - scriptName=scriptName, - bindAddress=addr, - allowedServers=aslist(allowedServers), - ) - s.run() - diff --git a/lib/nulib/python/nulib/ext/flup/server/preforkserver.py 
b/lib/nulib/python/nulib/ext/flup/server/preforkserver.py deleted file mode 100644 index 5eded2c..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/preforkserver.py +++ /dev/null @@ -1,433 +0,0 @@ -# Copyright (c) 2005 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import os -import socket -import select -import errno -import signal -import random -import time - -try: - import fcntl -except ImportError: - def setCloseOnExec(sock): - pass -else: - def setCloseOnExec(sock): - fcntl.fcntl(sock.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC) - -# If running Python < 2.4, require eunuchs module for socket.socketpair(). -# See . -if not hasattr(socket, 'socketpair'): - try: - import eunuchs.socketpair - except ImportError: - # TODO: Other alternatives? Perhaps using os.pipe()? - raise ImportError, 'Requires eunuchs module for Python < 2.4' - - def socketpair(): - s1, s2 = eunuchs.socketpair.socketpair() - p, c = (socket.fromfd(s1, socket.AF_UNIX, socket.SOCK_STREAM), - socket.fromfd(s2, socket.AF_UNIX, socket.SOCK_STREAM)) - os.close(s1) - os.close(s2) - return p, c - - socket.socketpair = socketpair - -class PreforkServer(object): - """ - A preforked server model conceptually similar to Apache httpd(2). At - any given time, ensures there are at least minSpare children ready to - process new requests (up to a maximum of maxChildren children total). - If the number of idle children is ever above maxSpare, the extra - children are killed. - - If maxRequests is positive, each child will only handle that many - requests in its lifetime before exiting. - - jobClass should be a class whose constructor takes at least two - arguments: the client socket and client address. jobArgs, which - must be a list or tuple, is any additional (static) arguments you - wish to pass to the constructor. - - jobClass should have a run() method (taking no arguments) that does - the actual work. When run() returns, the request is considered - complete and the child process moves to idle state. 
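# Illustrative sketch (not part of the original source; the port and the job
# body are placeholders): the smallest useful jobClass handed to a
# PreforkServer. Each child calls run() once per accepted connection, then
# reports itself idle to the parent.
import socket
from flup.server.preforkserver import PreforkServer

class EchoJob(object):
    def __init__(self, sock, addr):
        self._sock = sock
        self._addr = addr
    def run(self):
        self._sock.send('hello\n')
        self._sock.close()

if __name__ == '__main__':
    sock = socket.socket()
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('', 8081))
    sock.listen(socket.SOMAXCONN)
    PreforkServer(minSpare=2, maxSpare=4, maxChildren=20,
                  jobClass=EchoJob).run(sock)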
- """ - def __init__(self, minSpare=1, maxSpare=5, maxChildren=50, - maxRequests=0, jobClass=None, jobArgs=()): - self._minSpare = minSpare - self._maxSpare = maxSpare - self._maxChildren = max(maxSpare, maxChildren) - self._maxRequests = maxRequests - self._jobClass = jobClass - self._jobArgs = jobArgs - - # Internal state of children. Maps pids to dictionaries with two - # members: 'file' and 'avail'. 'file' is the socket to that - # individidual child and 'avail' is whether or not the child is - # free to process requests. - self._children = {} - - def run(self, sock): - """ - The main loop. Pass a socket that is ready to accept() client - connections. Return value will be True or False indiciating whether - or not the loop was exited due to SIGHUP. - """ - # Set up signal handlers. - self._keepGoing = True - self._hupReceived = False - self._installSignalHandlers() - - # Don't want operations on main socket to block. - sock.setblocking(0) - - # Set close-on-exec - setCloseOnExec(sock) - - # Main loop. - while self._keepGoing: - # Maintain minimum number of children. - while len(self._children) < self._maxSpare: - if not self._spawnChild(sock): break - - # Wait on any socket activity from live children. - r = [x['file'] for x in self._children.values() - if x['file'] is not None] - - if len(r) == len(self._children): - timeout = None - else: - # There are dead children that need to be reaped, ensure - # that they are by timing out, if necessary. - timeout = 2 - - try: - r, w, e = select.select(r, [], [], timeout) - except select.error, e: - if e[0] != errno.EINTR: - raise - - # Scan child sockets and tend to those that need attention. - for child in r: - # Receive status byte. - try: - state = child.recv(1) - except socket.error, e: - if e[0] in (errno.EAGAIN, errno.EINTR): - # Guess it really didn't need attention? - continue - raise - # Try to match it with a child. (Do we need a reverse map?) - for pid,d in self._children.items(): - if child is d['file']: - if state: - # Set availability status accordingly. - self._children[pid]['avail'] = state != '\x00' - else: - # Didn't receive anything. Child is most likely - # dead. - d = self._children[pid] - d['file'].close() - d['file'] = None - d['avail'] = False - - # Reap children. - self._reapChildren() - - # See who and how many children are available. - availList = filter(lambda x: x[1]['avail'], self._children.items()) - avail = len(availList) - - if avail < self._minSpare: - # Need to spawn more children. - while avail < self._minSpare and \ - len(self._children) < self._maxChildren: - if not self._spawnChild(sock): break - avail += 1 - elif avail > self._maxSpare: - # Too many spares, kill off the extras. - pids = [x[0] for x in availList] - pids.sort() - pids = pids[self._maxSpare:] - for pid in pids: - d = self._children[pid] - d['file'].close() - d['file'] = None - d['avail'] = False - - # Clean up all child processes. - self._cleanupChildren() - - # Restore signal handlers. - self._restoreSignalHandlers() - - # Return bool based on whether or not SIGHUP was received. - return self._hupReceived - - def _cleanupChildren(self): - """ - Closes all child sockets (letting those that are available know - that it's time to exit). Sends SIGINT to those that are currently - processing (and hopes that it finishses ASAP). - - Any children remaining after 10 seconds is SIGKILLed. - """ - # Let all children know it's time to go. 
- for pid,d in self._children.items(): - if d['file'] is not None: - d['file'].close() - d['file'] = None - if not d['avail']: - # Child is unavailable. SIGINT it. - try: - os.kill(pid, signal.SIGINT) - except OSError, e: - if e[0] != errno.ESRCH: - raise - - def alrmHandler(signum, frame): - pass - - # Set up alarm to wake us up after 10 seconds. - oldSIGALRM = signal.getsignal(signal.SIGALRM) - signal.signal(signal.SIGALRM, alrmHandler) - signal.alarm(10) - - # Wait for all children to die. - while len(self._children): - try: - pid, status = os.wait() - except OSError, e: - if e[0] in (errno.ECHILD, errno.EINTR): - break - if self._children.has_key(pid): - del self._children[pid] - - signal.signal(signal.SIGALRM, oldSIGALRM) - - # Forcefully kill any remaining children. - for pid in self._children.keys(): - try: - os.kill(pid, signal.SIGKILL) - except OSError, e: - if e[0] != errno.ESRCH: - raise - - def _reapChildren(self): - """Cleans up self._children whenever children die.""" - while True: - try: - pid, status = os.waitpid(-1, os.WNOHANG) - except OSError, e: - if e[0] == errno.ECHILD: - break - raise - if pid <= 0: - break - if self._children.has_key(pid): # Sanity check. - if self._children[pid]['file'] is not None: - self._children[pid]['file'].close() - del self._children[pid] - - def _spawnChild(self, sock): - """ - Spawn a single child. Returns True if successful, False otherwise. - """ - # This socket pair is used for very simple communication between - # the parent and its children. - parent, child = socket.socketpair() - parent.setblocking(0) - setCloseOnExec(parent) - child.setblocking(0) - setCloseOnExec(child) - try: - pid = os.fork() - except OSError, e: - if e[0] in (errno.EAGAIN, errno.ENOMEM): - return False # Can't fork anymore. - raise - if not pid: - # Child - child.close() - # Put child into its own process group. - pid = os.getpid() - os.setpgid(pid, pid) - # Restore signal handlers. - self._restoreSignalHandlers() - # Close copies of child sockets. - for f in [x['file'] for x in self._children.values() - if x['file'] is not None]: - f.close() - self._children = {} - try: - # Enter main loop. - self._child(sock, parent) - except KeyboardInterrupt: - pass - sys.exit(0) - else: - # Parent - parent.close() - d = self._children[pid] = {} - d['file'] = child - d['avail'] = True - return True - - def _isClientAllowed(self, addr): - """Override to provide access control.""" - return True - - def _notifyParent(self, parent, msg): - """Send message to parent, ignoring EPIPE and retrying on EAGAIN""" - while True: - try: - parent.send(msg) - return True - except socket.error, e: - if e[0] == errno.EPIPE: - return False # Parent is gone - if e[0] == errno.EAGAIN: - # Wait for socket change before sending again - select.select([], [parent], []) - else: - raise - - def _child(self, sock, parent): - """Main loop for children.""" - requestCount = 0 - - # Re-seed random module - preseed = '' - # urandom only exists in Python >= 2.4 - if hasattr(os, 'urandom'): - try: - preseed = os.urandom(16) - except NotImplementedError: - pass - # Have doubts about this. random.seed will just hash the string - random.seed('%s%s%s' % (preseed, os.getpid(), time.time())) - del preseed - - while True: - # Wait for any activity on the main socket or parent socket. - r, w, e = select.select([sock, parent], [], []) - - for f in r: - # If there's any activity on the parent socket, it - # means the parent wants us to die or has died itself. - # Either way, exit. 
- if f is parent: - return - - # Otherwise, there's activity on the main socket... - try: - clientSock, addr = sock.accept() - except socket.error, e: - if e[0] == errno.EAGAIN: - # Or maybe not. - continue - raise - - setCloseOnExec(clientSock) - - # Check if this client is allowed. - if not self._isClientAllowed(addr): - clientSock.close() - continue - - # Notify parent we're no longer available. - self._notifyParent(parent, '\x00') - - # Do the job. - self._jobClass(clientSock, addr, *self._jobArgs).run() - - # If we've serviced the maximum number of requests, exit. - if self._maxRequests > 0: - requestCount += 1 - if requestCount >= self._maxRequests: - break - - # Tell parent we're free again. - if not self._notifyParent(parent, '\xff'): - return # Parent is gone. - - # Signal handlers - - def _hupHandler(self, signum, frame): - self._keepGoing = False - self._hupReceived = True - - def _intHandler(self, signum, frame): - self._keepGoing = False - - def _chldHandler(self, signum, frame): - # Do nothing (breaks us out of select and allows us to reap children). - pass - - def _installSignalHandlers(self): - supportedSignals = [signal.SIGINT, signal.SIGTERM] - if hasattr(signal, 'SIGHUP'): - supportedSignals.append(signal.SIGHUP) - - self._oldSIGs = [(x,signal.getsignal(x)) for x in supportedSignals] - - for sig in supportedSignals: - if hasattr(signal, 'SIGHUP') and sig == signal.SIGHUP: - signal.signal(sig, self._hupHandler) - else: - signal.signal(sig, self._intHandler) - - def _restoreSignalHandlers(self): - """Restores previous signal handlers.""" - for signum,handler in self._oldSIGs: - signal.signal(signum, handler) - -if __name__ == '__main__': - class TestJob(object): - def __init__(self, sock, addr): - self._sock = sock - self._addr = addr - def run(self): - print "Client connection opened from %s:%d" % self._addr - self._sock.send('Hello World!\n') - self._sock.setblocking(1) - self._sock.recv(1) - self._sock.close() - print "Client connection closed from %s:%d" % self._addr - sock = socket.socket() - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(('', 8080)) - sock.listen(socket.SOMAXCONN) - PreforkServer(maxChildren=10, jobClass=TestJob).run(sock) diff --git a/lib/nulib/python/nulib/ext/flup/server/scgi.py b/lib/nulib/python/nulib/ext/flup/server/scgi.py deleted file mode 100644 index aad3d7b..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/scgi.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -scgi - an SCGI/WSGI gateway. - -For more information about SCGI and mod_scgi for Apache1/Apache2, see -. - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - import sys - from myapplication import app # Assume app is your WSGI application object - from scgi import WSGIServer - ret = WSGIServer(app).run() - sys.exit(ret and 42 or 0) - -See the documentation for WSGIServer for more information. - -About the bit of logic at the end: -Upon receiving SIGHUP, the python script will exit with status code 42. This -can be used by a wrapper script to determine if the python script should be -re-run. When a SIGINT or SIGTERM is received, the script exits with status -code 0, possibly indicating a normal exit. - -Example wrapper script: - - #!/bin/sh - STATUS=42 - while test $STATUS -eq 42; do - python "$@" that_script_above.py - STATUS=$? - done -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import logging -import socket - -from flup.server.scgi_base import BaseSCGIServer, Connection, NoDefault -from flup.server.threadedserver import ThreadedServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseSCGIServer, ThreadedServer): - """ - SCGI/WSGI server. For information about SCGI (Simple Common Gateway - Interface), see . - - This server is similar to SWAP , - another SCGI/WSGI server. - - It differs from SWAP in that it isn't based on scgi.scgi_server and - therefore, it allows me to implement concurrency using threads. (Also, - this server was written from scratch and really has no other depedencies.) - Which server to use really boils down to whether you want multithreading - or forking. (But as an aside, I've found scgi.scgi_server's implementation - of preforking to be quite superior. So if your application really doesn't - mind running in multiple processes, go use SWAP. ;) - """ - def __init__(self, application, scriptName=NoDefault, environ=None, - multithreaded=True, multiprocess=False, - bindAddress=('localhost', 4000), umask=None, - allowedServers=None, - loggingLevel=logging.INFO, debug=True, **kw): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - bindAddress is the address to bind to, which must be a string or - a tuple of length 2. If a tuple, the first element must be a string, - which is the host name or IPv4 address of a local interface. The - 2nd element of the tuple is the port number. If a string, it will - be interpreted as a filename and a UNIX socket will be opened. - - If binding to a UNIX socket, umask may be set to specify what - the umask is to be changed to before the socket is created in the - filesystem. 
After the socket is created, the previous umask is - restored. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. - - loggingLevel sets the logging level of the module-level logger. - """ - BaseSCGIServer.__init__(self, application, - scriptName=scriptName, - environ=environ, - multithreaded=multithreaded, - multiprocess=multiprocess, - bindAddress=bindAddress, - umask=umask, - allowedServers=allowedServers, - loggingLevel=loggingLevel, - debug=debug) - for key in ('jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - ThreadedServer.__init__(self, jobClass=Connection, jobArgs=(self,), - **kw) - - def run(self): - """ - Main loop. Call this after instantiating WSGIServer. SIGHUP, SIGINT, - SIGQUIT, SIGTERM cause it to cleanup and return. (If a SIGHUP - is caught, this method returns True. Returns False otherwise.) - """ - self.logger.info('%s starting up', self.__class__.__name__) - - try: - sock = self._setupSocket() - except socket.error, e: - self.logger.error('Failed to bind socket (%s), exiting', e[1]) - return False - - ret = ThreadedServer.run(self, sock) - - self._cleanupSocket(sock) - - self.logger.info('%s shutting down%s', self.__class__.__name__, - self._hupReceived and ' (reload requested)' or '') - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \
-              '<table border="1">'
-        names = environ.keys()
-        names.sort()
-        for name in names:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                name, cgi.escape(`environ[name]`))
-
-        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ,
-                                keep_blank_values=1)
-        if form.list:
-            yield '<tr><th colspan="2">Form data</th></tr>'
-
-        for field in form.list:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                field.name, field.value)
-
-        yield '</table>
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - WSGIServer(test_app, - loggingLevel=logging.DEBUG).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/scgi_base.py b/lib/nulib/python/nulib/ext/flup/server/scgi_base.py deleted file mode 100644 index cfa6662..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/scgi_base.py +++ /dev/null @@ -1,544 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import logging -import socket -import select -import errno -import cStringIO as StringIO -import signal -import datetime -import os -import warnings - -# Threads are required. If you want a non-threaded (forking) version, look at -# SWAP . -import thread -import threading - -__all__ = ['BaseSCGIServer'] - -class NoDefault(object): - pass - -# The main classes use this name for logging. -LoggerName = 'scgi-wsgi' - -# Set up module-level logger. -console = logging.StreamHandler() -console.setLevel(logging.DEBUG) -console.setFormatter(logging.Formatter('%(asctime)s : %(message)s', - '%Y-%m-%d %H:%M:%S')) -logging.getLogger(LoggerName).addHandler(console) -del console - -class ProtocolError(Exception): - """ - Exception raised when the server does something unexpected or - sends garbled data. Usually leads to a Connection closing. - """ - pass - -def recvall(sock, length): - """ - Attempts to receive length bytes from a socket, blocking if necessary. - (Socket may be blocking or non-blocking.) - """ - dataList = [] - recvLen = 0 - while length: - try: - data = sock.recv(length) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([sock], [], []) - continue - else: - raise - if not data: # EOF - break - dataList.append(data) - dataLen = len(data) - recvLen += dataLen - length -= dataLen - return ''.join(dataList), recvLen - -def readNetstring(sock): - """ - Attempt to read a netstring from a socket. - """ - # First attempt to read the length. 
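# Illustrative sketch (not part of the original source; the payload is a
# made-up example): a netstring is the payload length in decimal ASCII, a ':',
# the payload itself, then a ',' trailer. readNetstring() consumes the digits
# up to ':', reads exactly that many bytes, then checks the ',' trailer.
payload = 'hello world!'
netstring = '%d:%s,' % (len(payload), payload)
assert netstring == '12:hello world!,'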
- size = '' - while True: - try: - c = sock.recv(1) - except socket.error, e: - if e[0] == errno.EAGAIN: - select.select([sock], [], []) - continue - else: - raise - if c == ':': - break - if not c: - raise EOFError - size += c - - # Try to decode the length. - try: - size = int(size) - if size < 0: - raise ValueError - except ValueError: - raise ProtocolError, 'invalid netstring length' - - # Now read the string. - s, length = recvall(sock, size) - - if length < size: - raise EOFError - - # Lastly, the trailer. - trailer, length = recvall(sock, 1) - - if length < 1: - raise EOFError - - if trailer != ',': - raise ProtocolError, 'invalid netstring trailer' - - return s - -class StdoutWrapper(object): - """ - Wrapper for sys.stdout so we know if data has actually been written. - """ - def __init__(self, stdout): - self._file = stdout - self.dataWritten = False - - def write(self, data): - if data: - self.dataWritten = True - self._file.write(data) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def __getattr__(self, name): - return getattr(self._file, name) - -class Request(object): - """ - Encapsulates data related to a single request. - - Public attributes: - environ - Environment variables from web server. - stdin - File-like object representing the request body. - stdout - File-like object for writing the response. - """ - def __init__(self, conn, environ, input, output): - self._conn = conn - self.environ = environ - self.stdin = input - self.stdout = StdoutWrapper(output) - - self.logger = logging.getLogger(LoggerName) - - def run(self): - self.logger.info('%s %s%s', - self.environ['REQUEST_METHOD'], - self.environ.get('SCRIPT_NAME', ''), - self.environ.get('PATH_INFO', '')) - - start = datetime.datetime.now() - - try: - self._conn.server.handler(self) - except: - self.logger.exception('Exception caught from handler') - if not self.stdout.dataWritten: - self._conn.server.error(self) - - end = datetime.datetime.now() - - handlerTime = end - start - self.logger.debug('%s %s%s done (%.3f secs)', - self.environ['REQUEST_METHOD'], - self.environ.get('SCRIPT_NAME', ''), - self.environ.get('PATH_INFO', ''), - handlerTime.seconds + - handlerTime.microseconds / 1000000.0) - -class Connection(object): - """ - Represents a single client (web server) connection. A single request - is handled, after which the socket is closed. - """ - def __init__(self, sock, addr, server): - self._sock = sock - self._addr = addr - self.server = server - - self.logger = logging.getLogger(LoggerName) - - def run(self): - if len(self._addr) == 2: - self.logger.debug('Connection starting up (%s:%d)', - self._addr[0], self._addr[1]) - - try: - self.processInput() - except (EOFError, KeyboardInterrupt): - pass - except ProtocolError, e: - self.logger.error("Protocol error '%s'", str(e)) - except: - self.logger.exception('Exception caught in Connection') - - if len(self._addr) == 2: - self.logger.debug('Connection shutting down (%s:%d)', - self._addr[0], self._addr[1]) - - # All done! 
- self._sock.close() - - def processInput(self): - # Read headers - headers = readNetstring(self._sock) - headers = headers.split('\x00')[:-1] - if len(headers) % 2 != 0: - raise ProtocolError, 'invalid headers' - environ = {} - for i in range(len(headers) / 2): - environ[headers[2*i]] = headers[2*i+1] - - clen = environ.get('CONTENT_LENGTH') - if clen is None: - raise ProtocolError, 'missing CONTENT_LENGTH' - try: - clen = int(clen) - if clen < 0: - raise ValueError - except ValueError: - raise ProtocolError, 'invalid CONTENT_LENGTH' - - self._sock.setblocking(1) - if clen: - input = self._sock.makefile('r') - else: - # Empty input. - input = StringIO.StringIO() - - # stdout - output = self._sock.makefile('w') - - # Allocate Request - req = Request(self, environ, input, output) - - # Run it. - req.run() - - output.close() - input.close() - -class BaseSCGIServer(object): - # What Request class to use. - requestClass = Request - - def __init__(self, application, scriptName=NoDefault, environ=None, - multithreaded=True, multiprocess=False, - bindAddress=('localhost', 4000), umask=None, - allowedServers=NoDefault, - loggingLevel=logging.INFO, debug=True): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - Set multithreaded to False if your application is not thread-safe. - - Set multiprocess to True to explicitly set wsgi.multiprocess to - True. (Only makes sense with threaded servers.) - - bindAddress is the address to bind to, which must be a string or - a tuple of length 2. If a tuple, the first element must be a string, - which is the host name or IPv4 address of a local interface. The - 2nd element of the tuple is the port number. If a string, it will - be interpreted as a filename and a UNIX socket will be opened. - - If binding to a UNIX socket, umask may be set to specify what - the umask is to be changed to before the socket is created in the - filesystem. After the socket is created, the previous umask is - restored. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. By default, it is a list containing - the single item '127.0.0.1'. - - loggingLevel sets the logging level of the module-level logger. - """ - if environ is None: - environ = {} - - self.application = application - self.scriptName = scriptName - self.environ = environ - self.multithreaded = multithreaded - self.multiprocess = multiprocess - self.debug = debug - self._bindAddress = bindAddress - self._umask = umask - if allowedServers is NoDefault: - allowedServers = ['127.0.0.1'] - self._allowedServers = allowedServers - - # Used to force single-threadedness. 
- self._appLock = thread.allocate_lock() - - self.logger = logging.getLogger(LoggerName) - self.logger.setLevel(loggingLevel) - - def _setupSocket(self): - """Creates and binds the socket for communication with the server.""" - oldUmask = None - if type(self._bindAddress) is str: - # Unix socket - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - try: - os.unlink(self._bindAddress) - except OSError: - pass - if self._umask is not None: - oldUmask = os.umask(self._umask) - else: - # INET socket - assert type(self._bindAddress) is tuple - assert len(self._bindAddress) == 2 - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - - sock.bind(self._bindAddress) - sock.listen(socket.SOMAXCONN) - - if oldUmask is not None: - os.umask(oldUmask) - - return sock - - def _cleanupSocket(self, sock): - """Closes the main socket.""" - sock.close() - - def _isClientAllowed(self, addr): - ret = self._allowedServers is None or \ - len(addr) != 2 or \ - (len(addr) == 2 and addr[0] in self._allowedServers) - if not ret: - self.logger.warning('Server connection from %s disallowed', - addr[0]) - return ret - - def handler(self, request): - """ - WSGI handler. Sets up WSGI environment, calls the application, - and sends the application's response. - """ - environ = request.environ - environ.update(self.environ) - - environ['wsgi.version'] = (1,0) - environ['wsgi.input'] = request.stdin - environ['wsgi.errors'] = sys.stderr - environ['wsgi.multithread'] = self.multithreaded - environ['wsgi.multiprocess'] = self.multiprocess - environ['wsgi.run_once'] = False - - if environ.get('HTTPS', 'off') in ('on', '1'): - environ['wsgi.url_scheme'] = 'https' - else: - environ['wsgi.url_scheme'] = 'http' - - self._sanitizeEnv(environ) - - headers_set = [] - headers_sent = [] - result = None - - def write(data): - assert type(data) is str, 'write() argument must be string' - assert headers_set, 'write() before start_response()' - - if not headers_sent: - status, responseHeaders = headers_sent[:] = headers_set - found = False - for header,value in responseHeaders: - if header.lower() == 'content-length': - found = True - break - if not found and result is not None: - try: - if len(result) == 1: - responseHeaders.append(('Content-Length', - str(len(data)))) - except: - pass - s = 'Status: %s\r\n' % status - for header in responseHeaders: - s += '%s: %s\r\n' % header - s += '\r\n' - request.stdout.write(s) - - request.stdout.write(data) - request.stdout.flush() - - def start_response(status, response_headers, exc_info=None): - if exc_info: - try: - if headers_sent: - # Re-raise if too late - raise exc_info[0], exc_info[1], exc_info[2] - finally: - exc_info = None # avoid dangling circular ref - else: - assert not headers_set, 'Headers already set!' 
- - assert type(status) is str, 'Status must be a string' - assert len(status) >= 4, 'Status must be at least 4 characters' - assert int(status[:3]), 'Status must begin with 3-digit code' - assert status[3] == ' ', 'Status must have a space after code' - assert type(response_headers) is list, 'Headers must be a list' - if __debug__: - for name,val in response_headers: - assert type(name) is str, 'Header name "%s" must be a string' % name - assert type(val) is str, 'Value of header "%s" must be a string' % name - - headers_set[:] = [status, response_headers] - return write - - if not self.multithreaded: - self._appLock.acquire() - try: - try: - result = self.application(environ, start_response) - try: - for data in result: - if data: - write(data) - if not headers_sent: - write('') # in case body was empty - finally: - if hasattr(result, 'close'): - result.close() - except socket.error, e: - if e[0] != errno.EPIPE: - raise # Don't let EPIPE propagate beyond server - finally: - if not self.multithreaded: - self._appLock.release() - - def _sanitizeEnv(self, environ): - """Fill-in/deduce missing values in environ.""" - reqUri = None - if environ.has_key('REQUEST_URI'): - reqUri = environ['REQUEST_URI'].split('?', 1) - - # Ensure QUERY_STRING exists - if not environ.has_key('QUERY_STRING') or not environ['QUERY_STRING']: - if reqUri is not None and len(reqUri) > 1: - environ['QUERY_STRING'] = reqUri[1] - else: - environ['QUERY_STRING'] = '' - - # Check WSGI_SCRIPT_NAME - scriptName = environ.get('WSGI_SCRIPT_NAME') - if scriptName is None: - scriptName = self.scriptName - else: - warnings.warn('WSGI_SCRIPT_NAME environment variable for scgi ' - 'servers is deprecated', - DeprecationWarning) - if scriptName.lower() == 'none': - scriptName = None - - if scriptName is None: - # Do nothing (most likely coming from cgi2scgi) - return - - if scriptName is NoDefault: - # Pull SCRIPT_NAME/PATH_INFO from environment, with empty defaults - if not environ.has_key('SCRIPT_NAME'): - environ['SCRIPT_INFO'] = '' - if not environ.has_key('PATH_INFO') or not environ['PATH_INFO']: - if reqUri is not None: - environ['PATH_INFO'] = reqUri[0] - else: - environ['PATH_INFO'] = '' - else: - # Configured scriptName - warnings.warn('Configured SCRIPT_NAME is deprecated\n' - 'Do not use WSGI_SCRIPT_NAME or the scriptName\n' - 'keyword parameter -- they will be going away', - DeprecationWarning) - - value = environ['SCRIPT_NAME'] - value += environ.get('PATH_INFO', '') - if not value.startswith(scriptName): - self.logger.warning('scriptName does not match request URI') - - environ['PATH_INFO'] = value[len(scriptName):] - environ['SCRIPT_NAME'] = scriptName - - def error(self, request): - """ - Override to provide custom error handling. Ideally, however, - all errors should be caught at the application level. - """ - if self.debug: - import cgitb - request.stdout.write('Content-Type: text/html\r\n\r\n' + - cgitb.html(sys.exc_info())) - else: - errorpage = """ - -Unhandled Exception - -

-<h1>Unhandled Exception</h1>
-<p>An unhandled exception was thrown by the application.</p>
-</body></html>

- -""" - request.stdout.write('Content-Type: text/html\r\n\r\n' + - errorpage) diff --git a/lib/nulib/python/nulib/ext/flup/server/scgi_fork.py b/lib/nulib/python/nulib/ext/flup/server/scgi_fork.py deleted file mode 100644 index 1c7506f..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/scgi_fork.py +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright (c) 2005, 2006 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -""" -scgi - an SCGI/WSGI gateway. - -For more information about SCGI and mod_scgi for Apache1/Apache2, see -. - -For more information about the Web Server Gateway Interface, see -. - -Example usage: - - #!/usr/bin/env python - import sys - from myapplication import app # Assume app is your WSGI application object - from scgi import WSGIServer - ret = WSGIServer(app).run() - sys.exit(ret and 42 or 0) - -See the documentation for WSGIServer for more information. - -About the bit of logic at the end: -Upon receiving SIGHUP, the python script will exit with status code 42. This -can be used by a wrapper script to determine if the python script should be -re-run. When a SIGINT or SIGTERM is received, the script exits with status -code 0, possibly indicating a normal exit. - -Example wrapper script: - - #!/bin/sh - STATUS=42 - while test $STATUS -eq 42; do - python "$@" that_script_above.py - STATUS=$? - done -""" - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import logging -import socket - -from flup.server.scgi_base import BaseSCGIServer, Connection, NoDefault -from flup.server.preforkserver import PreforkServer - -__all__ = ['WSGIServer'] - -class WSGIServer(BaseSCGIServer, PreforkServer): - """ - SCGI/WSGI server. For information about SCGI (Simple Common Gateway - Interface), see . - - This server is similar to SWAP , - another SCGI/WSGI server. - - It differs from SWAP in that it isn't based on scgi.scgi_server and - therefore, it allows me to implement concurrency using threads. (Also, - this server was written from scratch and really has no other depedencies.) - Which server to use really boils down to whether you want multithreading - or forking. (But as an aside, I've found scgi.scgi_server's implementation - of preforking to be quite superior. 
So if your application really doesn't - mind running in multiple processes, go use SWAP. ;) - """ - def __init__(self, application, scriptName=NoDefault, environ=None, - bindAddress=('localhost', 4000), umask=None, - allowedServers=None, - loggingLevel=logging.INFO, debug=True, **kw): - """ - scriptName is the initial portion of the URL path that "belongs" - to your application. It is used to determine PATH_INFO (which doesn't - seem to be passed in). An empty scriptName means your application - is mounted at the root of your virtual host. - - environ, which must be a dictionary, can contain any additional - environment variables you want to pass to your application. - - bindAddress is the address to bind to, which must be a string or - a tuple of length 2. If a tuple, the first element must be a string, - which is the host name or IPv4 address of a local interface. The - 2nd element of the tuple is the port number. If a string, it will - be interpreted as a filename and a UNIX socket will be opened. - - If binding to a UNIX socket, umask may be set to specify what - the umask is to be changed to before the socket is created in the - filesystem. After the socket is created, the previous umask is - restored. - - allowedServers must be None or a list of strings representing the - IPv4 addresses of servers allowed to connect. None means accept - connections from anywhere. - - loggingLevel sets the logging level of the module-level logger. - """ - BaseSCGIServer.__init__(self, application, - scriptName=scriptName, - environ=environ, - multithreaded=False, - multiprocess=True, - bindAddress=bindAddress, - umask=umask, - allowedServers=allowedServers, - loggingLevel=loggingLevel, - debug=debug) - for key in ('multithreaded', 'multiprocess', 'jobClass', 'jobArgs'): - if kw.has_key(key): - del kw[key] - PreforkServer.__init__(self, jobClass=Connection, jobArgs=(self,), **kw) - - def run(self): - """ - Main loop. Call this after instantiating WSGIServer. SIGHUP, SIGINT, - SIGQUIT, SIGTERM cause it to cleanup and return. (If a SIGHUP - is caught, this method returns True. Returns False otherwise.) - """ - self.logger.info('%s starting up', self.__class__.__name__) - - try: - sock = self._setupSocket() - except socket.error, e: - self.logger.error('Failed to bind socket (%s), exiting', e[1]) - return False - - ret = PreforkServer.run(self, sock) - - self._cleanupSocket(sock) - - self.logger.info('%s shutting down%s', self.__class__.__name__, - self._hupReceived and ' (reload requested)' or '') - - return ret - -if __name__ == '__main__': - def test_app(environ, start_response): - """Probably not the most efficient example.""" - import cgi - start_response('200 OK', [('Content-Type', 'text/html')]) - yield 'Hello World!\n' \ - '\n' \ - '

<p>Hello World!</p>\n' \
-              '<table border="1">'
-        names = environ.keys()
-        names.sort()
-        for name in names:
-            yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                name, cgi.escape(`environ[name]`))
-
-        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ,
-                                keep_blank_values=1)
-        if form.list:
-            yield '<tr><th colspan="2">Form data</th></tr>'
-
-            for field in form.list:
-                yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-                    field.name, field.value)
-
-        yield '
\n' \ - '\n' - - from wsgiref import validate - test_app = validate.validator(test_app) - WSGIServer(test_app, - loggingLevel=logging.DEBUG).run() diff --git a/lib/nulib/python/nulib/ext/flup/server/singleserver.py b/lib/nulib/python/nulib/ext/flup/server/singleserver.py deleted file mode 100644 index 59fa6ea..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/singleserver.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2005 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import socket -import select -import signal -import errno - -try: - import fcntl -except ImportError: - def setCloseOnExec(sock): - pass -else: - def setCloseOnExec(sock): - fcntl.fcntl(sock.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC) - -__all__ = ['SingleServer'] - -class SingleServer(object): - def __init__(self, jobClass=None, jobArgs=(), **kw): - self._jobClass = jobClass - self._jobArgs = jobArgs - - def run(self, sock, timeout=1.0): - """ - The main loop. Pass a socket that is ready to accept() client - connections. Return value will be True or False indiciating whether - or not the loop was exited due to SIGHUP. - """ - # Set up signal handlers. - self._keepGoing = True - self._hupReceived = False - - # Might need to revisit this? - if not sys.platform.startswith('win'): - self._installSignalHandlers() - - # Set close-on-exec - setCloseOnExec(sock) - - # Main loop. - while self._keepGoing: - try: - r, w, e = select.select([sock], [], [], timeout) - except select.error, e: - if e[0] == errno.EINTR: - continue - raise - - if r: - try: - clientSock, addr = sock.accept() - except socket.error, e: - if e[0] in (errno.EINTR, errno.EAGAIN): - continue - raise - - setCloseOnExec(clientSock) - - if not self._isClientAllowed(addr): - clientSock.close() - continue - - # Hand off to Connection. - conn = self._jobClass(clientSock, addr, *self._jobArgs) - conn.run() - - self._mainloopPeriodic() - - # Restore signal handlers. - self._restoreSignalHandlers() - - # Return bool based on whether or not SIGHUP was received. - return self._hupReceived - - def _mainloopPeriodic(self): - """ - Called with just about each iteration of the main loop. Meant to - be overridden. 
- """ - pass - - def _exit(self, reload=False): - """ - Protected convenience method for subclasses to force an exit. Not - really thread-safe, which is why it isn't public. - """ - if self._keepGoing: - self._keepGoing = False - self._hupReceived = reload - - def _isClientAllowed(self, addr): - """Override to provide access control.""" - return True - - # Signal handlers - - def _hupHandler(self, signum, frame): - self._hupReceived = True - self._keepGoing = False - - def _intHandler(self, signum, frame): - self._keepGoing = False - - def _installSignalHandlers(self): - supportedSignals = [signal.SIGINT, signal.SIGTERM] - if hasattr(signal, 'SIGHUP'): - supportedSignals.append(signal.SIGHUP) - - self._oldSIGs = [(x,signal.getsignal(x)) for x in supportedSignals] - - for sig in supportedSignals: - if hasattr(signal, 'SIGHUP') and sig == signal.SIGHUP: - signal.signal(sig, self._hupHandler) - else: - signal.signal(sig, self._intHandler) - - def _restoreSignalHandlers(self): - for signum,handler in self._oldSIGs: - signal.signal(signum, handler) - -if __name__ == '__main__': - class TestJob(object): - def __init__(self, sock, addr): - self._sock = sock - self._addr = addr - def run(self): - print "Client connection opened from %s:%d" % self._addr - self._sock.send('Hello World!\n') - self._sock.setblocking(1) - self._sock.recv(1) - self._sock.close() - print "Client connection closed from %s:%d" % self._addr - sock = socket.socket() - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(('', 8080)) - sock.listen(socket.SOMAXCONN) - SingleServer(jobClass=TestJob).run(sock) diff --git a/lib/nulib/python/nulib/ext/flup/server/threadedserver.py b/lib/nulib/python/nulib/ext/flup/server/threadedserver.py deleted file mode 100644 index c232347..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/threadedserver.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright (c) 2005 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. 
-# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import socket -import select -import signal -import errno - -try: - import fcntl -except ImportError: - def setCloseOnExec(sock): - pass -else: - def setCloseOnExec(sock): - fcntl.fcntl(sock.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC) - -from flup.server.threadpool import ThreadPool - -__all__ = ['ThreadedServer'] - -class ThreadedServer(object): - def __init__(self, jobClass=None, jobArgs=(), **kw): - self._jobClass = jobClass - self._jobArgs = jobArgs - - self._threadPool = ThreadPool(**kw) - - def run(self, sock, timeout=1.0): - """ - The main loop. Pass a socket that is ready to accept() client - connections. Return value will be True or False indiciating whether - or not the loop was exited due to SIGHUP. - """ - # Set up signal handlers. - self._keepGoing = True - self._hupReceived = False - - # Might need to revisit this? - if not sys.platform.startswith('win'): - self._installSignalHandlers() - - # Set close-on-exec - setCloseOnExec(sock) - - # Main loop. - while self._keepGoing: - try: - r, w, e = select.select([sock], [], [], timeout) - except select.error, e: - if e[0] == errno.EINTR: - continue - raise - - if r: - try: - clientSock, addr = sock.accept() - except socket.error, e: - if e[0] in (errno.EINTR, errno.EAGAIN): - continue - raise - - setCloseOnExec(clientSock) - - if not self._isClientAllowed(addr): - clientSock.close() - continue - - # Hand off to Connection. - conn = self._jobClass(clientSock, addr, *self._jobArgs) - if not self._threadPool.addJob(conn, allowQueuing=False): - # No thread left, immediately close the socket to hopefully - # indicate to the web server that we're at our limit... - # and to prevent having too many opened (and useless) - # files. - clientSock.close() - - self._mainloopPeriodic() - - # Restore signal handlers. - self._restoreSignalHandlers() - - # Return bool based on whether or not SIGHUP was received. - return self._hupReceived - - def _mainloopPeriodic(self): - """ - Called with just about each iteration of the main loop. Meant to - be overridden. - """ - pass - - def _exit(self, reload=False): - """ - Protected convenience method for subclasses to force an exit. Not - really thread-safe, which is why it isn't public. 
- """ - if self._keepGoing: - self._keepGoing = False - self._hupReceived = reload - - def _isClientAllowed(self, addr): - """Override to provide access control.""" - return True - - # Signal handlers - - def _hupHandler(self, signum, frame): - self._hupReceived = True - self._keepGoing = False - - def _intHandler(self, signum, frame): - self._keepGoing = False - - def _installSignalHandlers(self): - supportedSignals = [signal.SIGINT, signal.SIGTERM] - if hasattr(signal, 'SIGHUP'): - supportedSignals.append(signal.SIGHUP) - - self._oldSIGs = [(x,signal.getsignal(x)) for x in supportedSignals] - - for sig in supportedSignals: - if hasattr(signal, 'SIGHUP') and sig == signal.SIGHUP: - signal.signal(sig, self._hupHandler) - else: - signal.signal(sig, self._intHandler) - - def _restoreSignalHandlers(self): - for signum,handler in self._oldSIGs: - signal.signal(signum, handler) - -if __name__ == '__main__': - class TestJob(object): - def __init__(self, sock, addr): - self._sock = sock - self._addr = addr - def run(self): - print "Client connection opened from %s:%d" % self._addr - self._sock.send('Hello World!\n') - self._sock.setblocking(1) - self._sock.recv(1) - self._sock.close() - print "Client connection closed from %s:%d" % self._addr - sock = socket.socket() - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(('', 8080)) - sock.listen(socket.SOMAXCONN) - ThreadedServer(maxThreads=10, jobClass=TestJob).run(sock) diff --git a/lib/nulib/python/nulib/ext/flup/server/threadpool.py b/lib/nulib/python/nulib/ext/flup/server/threadpool.py deleted file mode 100644 index a61885d..0000000 --- a/lib/nulib/python/nulib/ext/flup/server/threadpool.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) 2005 Allan Saddi -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -# $Id$ - -__author__ = 'Allan Saddi ' -__version__ = '$Revision$' - -import sys -import thread -import threading - -class ThreadPool(object): - """ - Thread pool that maintains the number of idle threads between - minSpare and maxSpare inclusive. By default, there is no limit on - the number of threads that can be started, but this can be controlled - by maxThreads. 
- """ - def __init__(self, minSpare=1, maxSpare=5, maxThreads=sys.maxint): - self._minSpare = minSpare - self._maxSpare = maxSpare - self._maxThreads = max(minSpare, maxThreads) - - self._lock = threading.Condition() - self._workQueue = [] - self._idleCount = self._workerCount = maxSpare - - # Start the minimum number of worker threads. - for i in range(maxSpare): - thread.start_new_thread(self._worker, ()) - - def addJob(self, job, allowQueuing=True): - """ - Adds a job to the work queue. The job object should have a run() - method. If allowQueuing is True (the default), the job will be - added to the work queue regardless if there are any idle threads - ready. (The only way for there to be no idle threads is if maxThreads - is some reasonable, finite limit.) - - Otherwise, if allowQueuing is False, and there are no more idle - threads, the job will not be queued. - - Returns True if the job was queued, False otherwise. - """ - self._lock.acquire() - try: - # Maintain minimum number of spares. - while self._idleCount < self._minSpare and \ - self._workerCount < self._maxThreads: - self._workerCount += 1 - self._idleCount += 1 - thread.start_new_thread(self._worker, ()) - - # Hand off the job. - if self._idleCount or allowQueuing: - self._workQueue.append(job) - self._lock.notify() - return True - else: - return False - finally: - self._lock.release() - - def _worker(self): - """ - Worker thread routine. Waits for a job, executes it, repeat. - """ - self._lock.acquire() - while True: - while not self._workQueue: - self._lock.wait() - - # We have a job to do... - job = self._workQueue.pop(0) - - assert self._idleCount > 0 - self._idleCount -= 1 - - self._lock.release() - - try: - job.run() - except: - # FIXME: This should really be reported somewhere. - # But we can't simply report it to stderr because of fcgi - pass - - self._lock.acquire() - - if self._idleCount == self._maxSpare: - break # NB: lock still held - self._idleCount += 1 - assert self._idleCount <= self._maxSpare - - # Die off... 
- assert self._workerCount > self._maxSpare - self._workerCount -= 1 - - self._lock.release() diff --git a/lib/nulib/python/nulib/ext/web/__init__.py b/lib/nulib/python/nulib/ext/web/__init__.py deleted file mode 100644 index 670dacb..0000000 --- a/lib/nulib/python/nulib/ext/web/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -"""web.py: makes web apps (http://webpy.org)""" - -from __future__ import generators - -__version__ = "0.38" -__author__ = [ - "Aaron Swartz ", - "Anand Chitipothu " -] -__license__ = "public domain" -__contributors__ = "see http://webpy.org/changes" - -import utils, db, net, wsgi, http, webapi, httpserver, debugerror -import template, form - -import session - -from utils import * -from db import * -from net import * -from wsgi import * -from http import * -from webapi import * -from httpserver import * -from debugerror import * -from application import * -from browser import * -try: - import webopenid as openid -except ImportError: - pass # requires openid module - diff --git a/lib/nulib/python/nulib/ext/web/application.py b/lib/nulib/python/nulib/ext/web/application.py deleted file mode 100644 index 668d11a..0000000 --- a/lib/nulib/python/nulib/ext/web/application.py +++ /dev/null @@ -1,740 +0,0 @@ -""" -Web application -(from web.py) -""" -import webapi as web -import webapi, wsgi, utils -import debugerror -import httpserver - -from utils import lstrips, safeunicode -import sys - -import urllib -import traceback -import itertools -import os -import types -from exceptions import SystemExit - -try: - import wsgiref.handlers -except ImportError: - pass # don't break people with old Pythons - -__all__ = [ - "application", "auto_application", - "subdir_application", "subdomain_application", - "loadhook", "unloadhook", - "autodelegate" -] - -class application: - """ - Application to delegate requests based on path. - - >>> urls = ("/hello", "hello") - >>> app = application(urls, globals()) - >>> class hello: - ... def GET(self): return "hello" - >>> - >>> app.request("/hello").data - 'hello' - """ - def __init__(self, mapping=(), fvars={}, autoreload=None): - if autoreload is None: - autoreload = web.config.get('debug', False) - self.init_mapping(mapping) - self.fvars = fvars - self.processors = [] - - self.add_processor(loadhook(self._load)) - self.add_processor(unloadhook(self._unload)) - - if autoreload: - def main_module_name(): - mod = sys.modules['__main__'] - file = getattr(mod, '__file__', None) # make sure this works even from python interpreter - return file and os.path.splitext(os.path.basename(file))[0] - - def modname(fvars): - """find name of the module name from fvars.""" - file, name = fvars.get('__file__'), fvars.get('__name__') - if file is None or name is None: - return None - - if name == '__main__': - # Since the __main__ module can't be reloaded, the module has - # to be imported using its file name. - name = main_module_name() - return name - - mapping_name = utils.dictfind(fvars, mapping) - module_name = modname(fvars) - - def reload_mapping(): - """loadhook to reload mapping and fvars.""" - mod = __import__(module_name, None, None, ['']) - mapping = getattr(mod, mapping_name, None) - if mapping: - self.fvars = mod.__dict__ - self.init_mapping(mapping) - - self.add_processor(loadhook(Reloader())) - if mapping_name and module_name: - self.add_processor(loadhook(reload_mapping)) - - # load __main__ module usings its filename, so that it can be reloaded. 
- if main_module_name() and '__main__' in sys.argv: - try: - __import__(main_module_name()) - except ImportError: - pass - - def _load(self): - web.ctx.app_stack.append(self) - - def _unload(self): - web.ctx.app_stack = web.ctx.app_stack[:-1] - - if web.ctx.app_stack: - # this is a sub-application, revert ctx to earlier state. - oldctx = web.ctx.get('_oldctx') - if oldctx: - web.ctx.home = oldctx.home - web.ctx.homepath = oldctx.homepath - web.ctx.path = oldctx.path - web.ctx.fullpath = oldctx.fullpath - - def _cleanup(self): - # Threads can be recycled by WSGI servers. - # Clearing up all thread-local state to avoid interefereing with subsequent requests. - utils.ThreadedDict.clear_all() - - def init_mapping(self, mapping): - self.mapping = list(utils.group(mapping, 2)) - - def add_mapping(self, pattern, classname): - self.mapping.append((pattern, classname)) - - def add_processor(self, processor): - """ - Adds a processor to the application. - - >>> urls = ("/(.*)", "echo") - >>> app = application(urls, globals()) - >>> class echo: - ... def GET(self, name): return name - ... - >>> - >>> def hello(handler): return "hello, " + handler() - ... - >>> app.add_processor(hello) - >>> app.request("/web.py").data - 'hello, web.py' - """ - self.processors.append(processor) - - def request(self, localpart='/', method='GET', data=None, - host="0.0.0.0:8080", headers=None, https=False, **kw): - """Makes request to this application for the specified path and method. - Response will be a storage object with data, status and headers. - - >>> urls = ("/hello", "hello") - >>> app = application(urls, globals()) - >>> class hello: - ... def GET(self): - ... web.header('Content-Type', 'text/plain') - ... return "hello" - ... - >>> response = app.request("/hello") - >>> response.data - 'hello' - >>> response.status - '200 OK' - >>> response.headers['Content-Type'] - 'text/plain' - - To use https, use https=True. - - >>> urls = ("/redirect", "redirect") - >>> app = application(urls, globals()) - >>> class redirect: - ... def GET(self): raise web.seeother("/foo") - ... - >>> response = app.request("/redirect") - >>> response.headers['Location'] - 'http://0.0.0.0:8080/foo' - >>> response = app.request("/redirect", https=True) - >>> response.headers['Location'] - 'https://0.0.0.0:8080/foo' - - The headers argument specifies HTTP headers as a mapping object - such as a dict. - - >>> urls = ('/ua', 'uaprinter') - >>> class uaprinter: - ... def GET(self): - ... return 'your user-agent is ' + web.ctx.env['HTTP_USER_AGENT'] - ... - >>> app = application(urls, globals()) - >>> app.request('/ua', headers = { - ... 'User-Agent': 'a small jumping bean/1.0 (compatible)' - ... 
}).data - 'your user-agent is a small jumping bean/1.0 (compatible)' - - """ - path, maybe_query = urllib.splitquery(localpart) - query = maybe_query or "" - - if 'env' in kw: - env = kw['env'] - else: - env = {} - env = dict(env, HTTP_HOST=host, REQUEST_METHOD=method, PATH_INFO=path, QUERY_STRING=query, HTTPS=str(https)) - headers = headers or {} - - for k, v in headers.items(): - env['HTTP_' + k.upper().replace('-', '_')] = v - - if 'HTTP_CONTENT_LENGTH' in env: - env['CONTENT_LENGTH'] = env.pop('HTTP_CONTENT_LENGTH') - - if 'HTTP_CONTENT_TYPE' in env: - env['CONTENT_TYPE'] = env.pop('HTTP_CONTENT_TYPE') - - if method not in ["HEAD", "GET"]: - data = data or '' - import StringIO - if isinstance(data, dict): - q = urllib.urlencode(data) - else: - q = data - env['wsgi.input'] = StringIO.StringIO(q) - if not env.get('CONTENT_TYPE', '').lower().startswith('multipart/') and 'CONTENT_LENGTH' not in env: - env['CONTENT_LENGTH'] = len(q) - response = web.storage() - def start_response(status, headers): - response.status = status - response.headers = dict(headers) - response.header_items = headers - response.data = "".join(self.wsgifunc()(env, start_response)) - return response - - def browser(self): - import browser - return browser.AppBrowser(self) - - def handle(self): - fn, args = self._match(self.mapping, web.ctx.path) - return self._delegate(fn, self.fvars, args) - - def handle_with_processors(self): - def process(processors): - try: - if processors: - p, processors = processors[0], processors[1:] - return p(lambda: process(processors)) - else: - return self.handle() - except web.HTTPError: - raise - except (KeyboardInterrupt, SystemExit): - raise - except: - print >> web.debug, traceback.format_exc() - raise self.internalerror() - - # processors must be applied in the resvere order. (??) - return process(self.processors) - - def wsgifunc(self, *middleware): - """Returns a WSGI-compatible function for this application.""" - def peep(iterator): - """Peeps into an iterator by doing an iteration - and returns an equivalent iterator. - """ - # wsgi requires the headers first - # so we need to do an iteration - # and save the result for later - try: - firstchunk = iterator.next() - except StopIteration: - firstchunk = '' - - return itertools.chain([firstchunk], iterator) - - def is_generator(x): return x and hasattr(x, 'next') - - def wsgi(env, start_resp): - # clear threadlocal to avoid inteference of previous requests - self._cleanup() - - self.load(env) - try: - # allow uppercase methods only - if web.ctx.method.upper() != web.ctx.method: - raise web.nomethod() - - result = self.handle_with_processors() - if is_generator(result): - result = peep(result) - else: - result = [result] - except web.HTTPError, e: - result = [e.data] - - result = web.safestr(iter(result)) - - status, headers = web.ctx.status, web.ctx.headers - start_resp(status, headers) - - def cleanup(): - self._cleanup() - yield '' # force this function to be a generator - - return itertools.chain(result, cleanup()) - - for m in middleware: - wsgi = m(wsgi) - - return wsgi - - def run(self, *middleware): - """ - Starts handling requests. If called in a CGI or FastCGI context, it will follow - that protocol. If called from the command line, it will start an HTTP - server on the port named in the first command line argument, or, if there - is no argument, on port 8080. - - `middleware` is a list of WSGI middleware which is applied to the resulting WSGI - function. 
- """ - return wsgi.runwsgi(self.wsgifunc(*middleware)) - - def stop(self): - """Stops the http server started by run. - """ - if httpserver.server: - httpserver.server.stop() - httpserver.server = None - - def cgirun(self, *middleware): - """ - Return a CGI handler. This is mostly useful with Google App Engine. - There you can just do: - - main = app.cgirun() - """ - wsgiapp = self.wsgifunc(*middleware) - - try: - from google.appengine.ext.webapp.util import run_wsgi_app - return run_wsgi_app(wsgiapp) - except ImportError: - # we're not running from within Google App Engine - return wsgiref.handlers.CGIHandler().run(wsgiapp) - - def gaerun(self, *middleware): - """ - Starts the program in a way that will work with Google app engine, - no matter which version you are using (2.5 / 2.7) - - If it is 2.5, just normally start it with app.gaerun() - - If it is 2.7, make sure to change the app.yaml handler to point to the - global variable that contains the result of app.gaerun() - - For example: - - in app.yaml (where code.py is where the main code is located) - - handlers: - - url: /.* - script: code.app - - Make sure that the app variable is globally accessible - """ - wsgiapp = self.wsgifunc(*middleware) - try: - # check what version of python is running - version = sys.version_info[:2] - major = version[0] - minor = version[1] - - if major != 2: - raise EnvironmentError("Google App Engine only supports python 2.5 and 2.7") - - # if 2.7, return a function that can be run by gae - if minor == 7: - return wsgiapp - # if 2.5, use run_wsgi_app - elif minor == 5: - from google.appengine.ext.webapp.util import run_wsgi_app - return run_wsgi_app(wsgiapp) - else: - raise EnvironmentError("Not a supported platform, use python 2.5 or 2.7") - except ImportError: - return wsgiref.handlers.CGIHandler().run(wsgiapp) - - def load(self, env): - """Initializes ctx using env.""" - ctx = web.ctx - ctx.clear() - ctx.status = '200 OK' - ctx.headers = [] - ctx.output = '' - ctx.environ = ctx.env = env - ctx.host = env.get('HTTP_HOST') - - if env.get('wsgi.url_scheme') in ['http', 'https']: - ctx.protocol = env['wsgi.url_scheme'] - elif env.get('HTTPS', '').lower() in ['on', 'true', '1']: - ctx.protocol = 'https' - else: - ctx.protocol = 'http' - ctx.homedomain = ctx.protocol + '://' + env.get('HTTP_HOST', '[unknown]') - ctx.homepath = os.environ.get('REAL_SCRIPT_NAME', env.get('SCRIPT_NAME', '')) - ctx.home = ctx.homedomain + ctx.homepath - #@@ home is changed when the request is handled to a sub-application. - #@@ but the real home is required for doing absolute redirects. - ctx.realhome = ctx.home - ctx.ip = env.get('REMOTE_ADDR') - ctx.method = env.get('REQUEST_METHOD') - ctx.path = env.get('PATH_INFO') or '' - # http://trac.lighttpd.net/trac/ticket/406 requires: - if env.get('SERVER_SOFTWARE', '').startswith('lighttpd/'): - ctx.path = lstrips(env.get('REQUEST_URI').split('?')[0], ctx.homepath) - # Apache and CherryPy webservers unquote the url but lighttpd doesn't. - # unquote explicitly for lighttpd to make ctx.path uniform across all servers. - ctx.path = urllib.unquote(ctx.path) - - if env.get('QUERY_STRING'): - ctx.query = '?' + env.get('QUERY_STRING', '') - else: - ctx.query = '' - - ctx.fullpath = ctx.path + ctx.query - - for k, v in ctx.iteritems(): - # convert all string values to unicode values and replace - # malformed data with a suitable replacement marker. 
- if isinstance(v, str): - ctx[k] = v.decode('utf-8', 'replace') - - # status must always be str - ctx.status = '200 OK' - - ctx.app_stack = [] - - _handler_configurator = None - - def set_handler_configurator(self, handler_configurator): - self._handler_configurator = handler_configurator - - def configure_handler(self, handler): - if self._handler_configurator is not None: - self._handler_configurator(handler) - - def _delegate(self, f, fvars, args=[]): - def handle_class(cls): - meth = web.ctx.method - if meth == 'HEAD' and not hasattr(cls, meth): - meth = 'GET' - if not hasattr(cls, meth): - raise web.nomethod(cls) - handler = cls() - self.configure_handler(handler) - tocall = getattr(handler, meth) - return tocall(*args) - - def is_class(o): return isinstance(o, (types.ClassType, type)) - - if f is None: - raise web.notfound() - elif isinstance(f, application): - return f.handle_with_processors() - elif is_class(f): - return handle_class(f) - elif isinstance(f, basestring): - if f.startswith('redirect '): - url = f.split(' ', 1)[1] - if web.ctx.method == "GET": - x = web.ctx.env.get('QUERY_STRING', '') - if x: - url += '?' + x - raise web.redirect(url) - elif '.' in f: - mod, cls = f.rsplit('.', 1) - mod = __import__(mod, None, None, ['']) - cls = getattr(mod, cls) - else: - cls = fvars[f] - return handle_class(cls) - elif hasattr(f, '__call__'): - return f() - else: - return web.notfound() - - def _match(self, mapping, value): - for pat, what in mapping: - if isinstance(what, application): - if value.startswith(pat): - f = lambda: self._delegate_sub_application(pat, what) - return f, None - else: - continue - elif isinstance(what, basestring): - what, result = utils.re_subm('^' + pat + '$', what, value) - else: - result = utils.re_compile('^' + pat + '$').match(value) - - if result: # it's a match - return what, [x for x in result.groups()] - return None, None - - def _delegate_sub_application(self, dir, app): - """Deletes request to sub application `app` rooted at the directory `dir`. - The home, homepath, path and fullpath values in web.ctx are updated to mimic request - to the subapp and are restored after it is handled. - - @@Any issues with when used with yield? - """ - web.ctx._oldctx = web.storage(web.ctx) - web.ctx.home += dir - web.ctx.homepath += dir - web.ctx.path = web.ctx.path[len(dir):] - web.ctx.fullpath = web.ctx.fullpath[len(dir):] - return app.handle_with_processors() - - def get_parent_app(self): - if self in web.ctx.app_stack: - index = web.ctx.app_stack.index(self) - if index > 0: - return web.ctx.app_stack[index-1] - - def notfound(self): - """Returns HTTPError with '404 not found' message""" - parent = self.get_parent_app() - if parent: - return parent.notfound() - else: - return web._NotFound() - - def internalerror(self): - """Returns HTTPError with '500 internal error' message""" - parent = self.get_parent_app() - if parent: - return parent.internalerror() - elif web.config.get('debug'): - import debugerror - return debugerror.debugerror() - else: - return web._InternalError() - -class auto_application(application): - """Application similar to `application` but urls are constructed - automatiacally using metaclass. - - >>> app = auto_application() - >>> class hello(app.page): - ... def GET(self): return "hello, world" - ... - >>> class foo(app.page): - ... path = '/foo/.*' - ... 
def GET(self): return "foo" - >>> app.request("/hello").data - 'hello, world' - >>> app.request('/foo/bar').data - 'foo' - """ - def __init__(self): - application.__init__(self) - - class metapage(type): - def __init__(klass, name, bases, attrs): - type.__init__(klass, name, bases, attrs) - path = attrs.get('path', '/' + name) - - # path can be specified as None to ignore that class - # typically required to create a abstract base class. - if path is not None: - self.add_mapping(path, klass) - - class page: - path = None - __metaclass__ = metapage - - self.page = page - -# The application class already has the required functionality of subdir_application -subdir_application = application - -class subdomain_application(application): - """ - Application to delegate requests based on the host. - - >>> urls = ("/hello", "hello") - >>> app = application(urls, globals()) - >>> class hello: - ... def GET(self): return "hello" - >>> - >>> mapping = (r"hello\.example\.com", app) - >>> app2 = subdomain_application(mapping) - >>> app2.request("/hello", host="hello.example.com").data - 'hello' - >>> response = app2.request("/hello", host="something.example.com") - >>> response.status - '404 Not Found' - >>> response.data - 'not found' - """ - def handle(self): - host = web.ctx.host.split(':')[0] #strip port - fn, args = self._match(self.mapping, host) - return self._delegate(fn, self.fvars, args) - - def _match(self, mapping, value): - for pat, what in mapping: - if isinstance(what, basestring): - what, result = utils.re_subm('^' + pat + '$', what, value) - else: - result = utils.re_compile('^' + pat + '$').match(value) - - if result: # it's a match - return what, [x for x in result.groups()] - return None, None - -def loadhook(h): - """ - Converts a load hook into an application processor. - - >>> app = auto_application() - >>> def f(): "something done before handling request" - ... - >>> app.add_processor(loadhook(f)) - """ - def processor(handler): - h() - return handler() - - return processor - -def unloadhook(h): - """ - Converts an unload hook into an application processor. - - >>> app = auto_application() - >>> def f(): "something done after handling request" - ... - >>> app.add_processor(unloadhook(f)) - """ - def processor(handler): - try: - result = handler() - is_generator = result and hasattr(result, 'next') - except: - # run the hook even when handler raises some exception - h() - raise - - if is_generator: - return wrap(result) - else: - h() - return result - - def wrap(result): - def next(): - try: - return result.next() - except: - # call the hook at the and of iterator - h() - raise - - result = iter(result) - while True: - yield next() - - return processor - -def autodelegate(prefix=''): - """ - Returns a method that takes one argument and calls the method named prefix+arg, - calling `notfound()` if there isn't one. Example: - - urls = ('/prefs/(.*)', 'prefs') - - class prefs: - GET = autodelegate('GET_') - def GET_password(self): pass - def GET_privacy(self): pass - - `GET_password` would get called for `/prefs/password` while `GET_privacy` for - `GET_privacy` gets called for `/prefs/privacy`. - - If a user visits `/prefs/password/change` then `GET_password(self, '/change')` - is called. 
- """ - def internal(self, arg): - if '/' in arg: - first, rest = arg.split('/', 1) - func = prefix + first - args = ['/' + rest] - else: - func = prefix + arg - args = [] - - if hasattr(self, func): - try: - return getattr(self, func)(*args) - except TypeError: - raise web.notfound() - else: - raise web.notfound() - return internal - -class Reloader: - """Checks to see if any loaded modules have changed on disk and, - if so, reloads them. - """ - - """File suffix of compiled modules.""" - if sys.platform.startswith('java'): - SUFFIX = '$py.class' - else: - SUFFIX = '.pyc' - - def __init__(self): - self.mtimes = {} - - def __call__(self): - for mod in sys.modules.values(): - self.check(mod) - - def check(self, mod): - # jython registers java packages as modules but they either - # don't have a __file__ attribute or its value is None - if not (mod and hasattr(mod, '__file__') and mod.__file__): - return - - try: - mtime = os.stat(mod.__file__).st_mtime - except (OSError, IOError): - return - if mod.__file__.endswith(self.__class__.SUFFIX) and os.path.exists(mod.__file__[:-1]): - mtime = max(os.stat(mod.__file__[:-1]).st_mtime, mtime) - - if mod not in self.mtimes: - self.mtimes[mod] = mtime - elif self.mtimes[mod] < mtime: - try: - reload(mod) - self.mtimes[mod] = mtime - except ImportError: - pass - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/browser.py b/lib/nulib/python/nulib/ext/web/browser.py deleted file mode 100644 index 66d859e..0000000 --- a/lib/nulib/python/nulib/ext/web/browser.py +++ /dev/null @@ -1,236 +0,0 @@ -"""Browser to test web applications. -(from web.py) -""" -from utils import re_compile -from net import htmlunquote - -import httplib, urllib, urllib2 -import copy -from StringIO import StringIO - -DEBUG = False - -__all__ = [ - "BrowserError", - "Browser", "AppBrowser", - "AppHandler" -] - -class BrowserError(Exception): - pass - -class Browser: - def __init__(self): - import cookielib - self.cookiejar = cookielib.CookieJar() - self._cookie_processor = urllib2.HTTPCookieProcessor(self.cookiejar) - self.form = None - - self.url = "http://0.0.0.0:8080/" - self.path = "/" - - self.status = None - self.data = None - self._response = None - self._forms = None - - def reset(self): - """Clears all cookies and history.""" - self.cookiejar.clear() - - def build_opener(self): - """Builds the opener using urllib2.build_opener. - Subclasses can override this function to prodive custom openers. 
- """ - return urllib2.build_opener() - - def do_request(self, req): - if DEBUG: - print 'requesting', req.get_method(), req.get_full_url() - opener = self.build_opener() - opener.add_handler(self._cookie_processor) - try: - self._response = opener.open(req) - except urllib2.HTTPError, e: - self._response = e - - self.url = self._response.geturl() - self.path = urllib2.Request(self.url).get_selector() - self.data = self._response.read() - self.status = self._response.code - self._forms = None - self.form = None - return self.get_response() - - def open(self, url, data=None, headers={}): - """Opens the specified url.""" - url = urllib.basejoin(self.url, url) - req = urllib2.Request(url, data, headers) - return self.do_request(req) - - def show(self): - """Opens the current page in real web browser.""" - f = open('page.html', 'w') - f.write(self.data) - f.close() - - import webbrowser, os - url = 'file://' + os.path.abspath('page.html') - webbrowser.open(url) - - def get_response(self): - """Returns a copy of the current response.""" - return urllib.addinfourl(StringIO(self.data), self._response.info(), self._response.geturl()) - - def get_soup(self): - """Returns beautiful soup of the current document.""" - import BeautifulSoup - return BeautifulSoup.BeautifulSoup(self.data) - - def get_text(self, e=None): - """Returns content of e or the current document as plain text.""" - e = e or self.get_soup() - return ''.join([htmlunquote(c) for c in e.recursiveChildGenerator() if isinstance(c, unicode)]) - - def _get_links(self): - soup = self.get_soup() - return [a for a in soup.findAll(name='a')] - - def get_links(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None): - """Returns all links in the document.""" - return self._filter_links(self._get_links(), - text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate) - - def follow_link(self, link=None, text=None, text_regex=None, url=None, url_regex=None, predicate=None): - if link is None: - links = self._filter_links(self.get_links(), - text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate) - link = links and links[0] - - if link: - return self.open(link['href']) - else: - raise BrowserError("No link found") - - def find_link(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None): - links = self._filter_links(self.get_links(), - text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate) - return links and links[0] or None - - def _filter_links(self, links, - text=None, text_regex=None, - url=None, url_regex=None, - predicate=None): - predicates = [] - if text is not None: - predicates.append(lambda link: link.string == text) - if text_regex is not None: - predicates.append(lambda link: re_compile(text_regex).search(link.string or '')) - if url is not None: - predicates.append(lambda link: link.get('href') == url) - if url_regex is not None: - predicates.append(lambda link: re_compile(url_regex).search(link.get('href', ''))) - if predicate: - predicate.append(predicate) - - def f(link): - for p in predicates: - if not p(link): - return False - return True - - return [link for link in links if f(link)] - - def get_forms(self): - """Returns all forms in the current document. - The returned form objects implement the ClientForm.HTMLForm interface. 
- """ - if self._forms is None: - import ClientForm - self._forms = ClientForm.ParseResponse(self.get_response(), backwards_compat=False) - return self._forms - - def select_form(self, name=None, predicate=None, index=0): - """Selects the specified form.""" - forms = self.get_forms() - - if name is not None: - forms = [f for f in forms if f.name == name] - if predicate: - forms = [f for f in forms if predicate(f)] - - if forms: - self.form = forms[index] - return self.form - else: - raise BrowserError("No form selected.") - - def submit(self, **kw): - """submits the currently selected form.""" - if self.form is None: - raise BrowserError("No form selected.") - req = self.form.click(**kw) - return self.do_request(req) - - def __getitem__(self, key): - return self.form[key] - - def __setitem__(self, key, value): - self.form[key] = value - -class AppBrowser(Browser): - """Browser interface to test web.py apps. - - b = AppBrowser(app) - b.open('/') - b.follow_link(text='Login') - - b.select_form(name='login') - b['username'] = 'joe' - b['password'] = 'secret' - b.submit() - - assert b.path == '/' - assert 'Welcome joe' in b.get_text() - """ - def __init__(self, app): - Browser.__init__(self) - self.app = app - - def build_opener(self): - return urllib2.build_opener(AppHandler(self.app)) - -class AppHandler(urllib2.HTTPHandler): - """urllib2 handler to handle requests using web.py application.""" - handler_order = 100 - - def __init__(self, app): - self.app = app - - def http_open(self, req): - result = self.app.request( - localpart=req.get_selector(), - method=req.get_method(), - host=req.get_host(), - data=req.get_data(), - headers=dict(req.header_items()), - https=req.get_type() == "https" - ) - return self._make_response(result, req.get_full_url()) - - def https_open(self, req): - return self.http_open(req) - - try: - https_request = urllib2.HTTPHandler.do_request_ - except AttributeError: - # for python 2.3 - pass - - def _make_response(self, result, url): - data = "\r\n".join(["%s: %s" % (k, v) for k, v in result.header_items]) - headers = httplib.HTTPMessage(StringIO(data)) - response = urllib.addinfourl(StringIO(result.data), headers, url) - code, msg = result.status.split(None, 1) - response.code, response.msg = int(code), msg - return response diff --git a/lib/nulib/python/nulib/ext/web/contrib/__init__.py b/lib/nulib/python/nulib/ext/web/contrib/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/lib/nulib/python/nulib/ext/web/contrib/template.py b/lib/nulib/python/nulib/ext/web/contrib/template.py deleted file mode 100644 index 7495d39..0000000 --- a/lib/nulib/python/nulib/ext/web/contrib/template.py +++ /dev/null @@ -1,131 +0,0 @@ -""" -Interface to various templating engines. -""" -import os.path - -__all__ = [ - "render_cheetah", "render_genshi", "render_mako", - "cache", -] - -class render_cheetah: - """Rendering interface to Cheetah Templates. - - Example: - - render = render_cheetah('templates') - render.hello(name="cheetah") - """ - def __init__(self, path): - # give error if Chetah is not installed - from Cheetah.Template import Template - self.path = path - - def __getattr__(self, name): - from Cheetah.Template import Template - path = os.path.join(self.path, name + ".html") - - def template(**kw): - t = Template(file=path, searchList=[kw]) - return t.respond() - - return template - -class render_genshi: - """Rendering interface genshi templates. - Example: - - for xml/html templates. 
- - render = render_genshi(['templates/']) - render.hello(name='genshi') - - For text templates: - - render = render_genshi(['templates/'], type='text') - render.hello(name='genshi') - """ - - def __init__(self, *a, **kwargs): - from genshi.template import TemplateLoader - - self._type = kwargs.pop('type', None) - self._loader = TemplateLoader(*a, **kwargs) - - def __getattr__(self, name): - # Assuming all templates are html - path = name + ".html" - - if self._type == "text": - from genshi.template import TextTemplate - cls = TextTemplate - type = "text" - else: - cls = None - type = None - - t = self._loader.load(path, cls=cls) - def template(**kw): - stream = t.generate(**kw) - if type: - return stream.render(type) - else: - return stream.render() - return template - -class render_jinja: - """Rendering interface to Jinja2 Templates - - Example: - - render= render_jinja('templates') - render.hello(name='jinja2') - """ - def __init__(self, *a, **kwargs): - extensions = kwargs.pop('extensions', []) - globals = kwargs.pop('globals', {}) - - from jinja2 import Environment,FileSystemLoader - self._lookup = Environment(loader=FileSystemLoader(*a, **kwargs), extensions=extensions) - self._lookup.globals.update(globals) - - def __getattr__(self, name): - # Assuming all templates end with .html - path = name + '.html' - t = self._lookup.get_template(path) - return t.render - -class render_mako: - """Rendering interface to Mako Templates. - - Example: - - render = render_mako(directories=['templates']) - render.hello(name="mako") - """ - def __init__(self, *a, **kwargs): - from mako.lookup import TemplateLookup - self._lookup = TemplateLookup(*a, **kwargs) - - def __getattr__(self, name): - # Assuming all templates are html - path = name + ".html" - t = self._lookup.get_template(path) - return t.render - -class cache: - """Cache for any rendering interface. 
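All of the adapters in this file (`render_cheetah`, `render_genshi`, `render_jinja`, `render_mako`) share one shape: `__getattr__` turns an attribute lookup into "load `<name>.html` and return a callable that renders it with keyword arguments". A dependency-free sketch of that pattern, for illustration only (`render_plain` and `string.Template` are stand-ins, not part of this module):

    import os.path
    from string import Template

    class render_plain(object):
        """Toy adapter: render.hello(name='world') loads templates/hello.html
        (e.g. a file containing 'Hello $name') and substitutes the keywords."""
        def __init__(self, path):
            self.path = path

        def __getattr__(self, name):
            # attribute access picks the template file: render.hello -> hello.html
            tmpl_path = os.path.join(self.path, name + ".html")
            def template(**kw):
                with open(tmpl_path) as f:
                    return Template(f.read()).substitute(**kw)
            return template

The `cache` wrapper below applies unchanged to any such adapter, since it only memoizes the callables returned by attribute lookup.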
- - Example: - - render = cache(render_cheetah("templates/")) - render.hello(name='cache') - """ - def __init__(self, render): - self._render = render - self._cache = {} - - def __getattr__(self, name): - if name not in self._cache: - self._cache[name] = getattr(self._render, name) - return self._cache[name] diff --git a/lib/nulib/python/nulib/ext/web/db.py b/lib/nulib/python/nulib/ext/web/db.py deleted file mode 100644 index e52a76d..0000000 --- a/lib/nulib/python/nulib/ext/web/db.py +++ /dev/null @@ -1,1281 +0,0 @@ -""" -Database API -(part of web.py) -""" - -__all__ = [ - "UnknownParamstyle", "UnknownDB", "TransactionError", - "sqllist", "sqlors", "reparam", "sqlquote", - "SQLQuery", "SQLParam", "sqlparam", - "SQLLiteral", "sqlliteral", - "database", 'DB', -] - -import time, os, urllib, urlparse -try: - import datetime -except ImportError: - datetime = None - -try: set -except NameError: - from sets import Set as set - -from utils import threadeddict, storage, iters, iterbetter, safestr, safeunicode - -try: - # db module can work independent of web.py - from webapi import debug, config -except: - import sys - debug = sys.stderr - config = storage() - -class UnknownDB(Exception): - """raised for unsupported dbms""" - pass - -class _ItplError(ValueError): - def __init__(self, text, pos): - ValueError.__init__(self) - self.text = text - self.pos = pos - def __str__(self): - return "unfinished expression in %s at char %d" % ( - repr(self.text), self.pos) - -class TransactionError(Exception): pass - -class UnknownParamstyle(Exception): - """ - raised for unsupported db paramstyles - - (currently supported: qmark, numeric, format, pyformat) - """ - pass - -class SQLParam(object): - """ - Parameter in SQLQuery. - - >>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam("joe")]) - >>> q - - >>> q.query() - 'SELECT * FROM test WHERE name=%s' - >>> q.values() - ['joe'] - """ - __slots__ = ["value"] - - def __init__(self, value): - self.value = value - - def get_marker(self, paramstyle='pyformat'): - if paramstyle == 'qmark': - return '?' - elif paramstyle == 'numeric': - return ':1' - elif paramstyle is None or paramstyle in ['format', 'pyformat']: - return '%s' - raise UnknownParamstyle, paramstyle - - def sqlquery(self): - return SQLQuery([self]) - - def __add__(self, other): - return self.sqlquery() + other - - def __radd__(self, other): - return other + self.sqlquery() - - def __str__(self): - return str(self.value) - - def __repr__(self): - return '' % repr(self.value) - -sqlparam = SQLParam - -class SQLQuery(object): - """ - You can pass this sort of thing as a clause in any db function. - Otherwise, you can pass a dictionary to the keyword argument `vars` - and the function will call reparam for you. - - Internally, consists of `items`, which is a list of strings and - SQLParams, which get concatenated to produce the actual query. - """ - __slots__ = ["items"] - - # tested in sqlquote's docstring - def __init__(self, items=None): - r"""Creates a new SQLQuery. 
- - >>> SQLQuery("x") - - >>> q = SQLQuery(['SELECT * FROM ', 'test', ' WHERE x=', SQLParam(1)]) - >>> q - - >>> q.query(), q.values() - ('SELECT * FROM test WHERE x=%s', [1]) - >>> SQLQuery(SQLParam(1)) - - """ - if items is None: - self.items = [] - elif isinstance(items, list): - self.items = items - elif isinstance(items, SQLParam): - self.items = [items] - elif isinstance(items, SQLQuery): - self.items = list(items.items) - else: - self.items = [items] - - # Take care of SQLLiterals - for i, item in enumerate(self.items): - if isinstance(item, SQLParam) and isinstance(item.value, SQLLiteral): - self.items[i] = item.value.v - - def append(self, value): - self.items.append(value) - - def __add__(self, other): - if isinstance(other, basestring): - items = [other] - elif isinstance(other, SQLQuery): - items = other.items - else: - return NotImplemented - return SQLQuery(self.items + items) - - def __radd__(self, other): - if isinstance(other, basestring): - items = [other] - else: - return NotImplemented - - return SQLQuery(items + self.items) - - def __iadd__(self, other): - if isinstance(other, (basestring, SQLParam)): - self.items.append(other) - elif isinstance(other, SQLQuery): - self.items.extend(other.items) - else: - return NotImplemented - return self - - def __len__(self): - return len(self.query()) - - def query(self, paramstyle=None): - """ - Returns the query part of the sql query. - >>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam('joe')]) - >>> q.query() - 'SELECT * FROM test WHERE name=%s' - >>> q.query(paramstyle='qmark') - 'SELECT * FROM test WHERE name=?' - """ - s = [] - for x in self.items: - if isinstance(x, SQLParam): - x = x.get_marker(paramstyle) - s.append(safestr(x)) - else: - x = safestr(x) - # automatically escape % characters in the query - # For backward compatability, ignore escaping when the query looks already escaped - if paramstyle in ['format', 'pyformat']: - if '%' in x and '%%' not in x: - x = x.replace('%', '%%') - s.append(x) - return "".join(s) - - def values(self): - """ - Returns the values of the parameters used in the sql query. - >>> q = SQLQuery(["SELECT * FROM test WHERE name=", SQLParam('joe')]) - >>> q.values() - ['joe'] - """ - return [i.value for i in self.items if isinstance(i, SQLParam)] - - def join(items, sep=' ', prefix=None, suffix=None, target=None): - """ - Joins multiple queries. - - >>> SQLQuery.join(['a', 'b'], ', ') - - - Optinally, prefix and suffix arguments can be provided. - - >>> SQLQuery.join(['a', 'b'], ', ', prefix='(', suffix=')') - - - If target argument is provided, the items are appended to target instead of creating a new SQLQuery. - """ - if target is None: - target = SQLQuery() - - target_items = target.items - - if prefix: - target_items.append(prefix) - - for i, item in enumerate(items): - if i != 0: - target_items.append(sep) - if isinstance(item, SQLQuery): - target_items.extend(item.items) - else: - target_items.append(item) - - if suffix: - target_items.append(suffix) - return target - - join = staticmethod(join) - - def _str(self): - try: - return self.query() % tuple([sqlify(x) for x in self.values()]) - except (ValueError, TypeError): - return self.query() - - def __str__(self): - return safestr(self._str()) - - def __unicode__(self): - return safeunicode(self._str()) - - def __repr__(self): - return '' % repr(str(self)) - -class SQLLiteral: - """ - Protects a string from `sqlquote`. 
- - >>> sqlquote('NOW()') - - >>> sqlquote(SQLLiteral('NOW()')) - - """ - def __init__(self, v): - self.v = v - - def __repr__(self): - return self.v - -sqlliteral = SQLLiteral - -def _sqllist(values): - """ - >>> _sqllist([1, 2, 3]) - - """ - items = [] - items.append('(') - for i, v in enumerate(values): - if i != 0: - items.append(', ') - items.append(sqlparam(v)) - items.append(')') - return SQLQuery(items) - -def reparam(string_, dictionary): - """ - Takes a string and a dictionary and interpolates the string - using values from the dictionary. Returns an `SQLQuery` for the result. - - >>> reparam("s = $s", dict(s=True)) - - >>> reparam("s IN $s", dict(s=[1, 2])) - - """ - dictionary = dictionary.copy() # eval mucks with it - # disable builtins to avoid risk for remote code exection. - dictionary['__builtins__'] = object() - vals = [] - result = [] - for live, chunk in _interpolate(string_): - if live: - v = eval(chunk, dictionary) - result.append(sqlquote(v)) - else: - result.append(chunk) - return SQLQuery.join(result, '') - -def sqlify(obj): - """ - converts `obj` to its proper SQL version - - >>> sqlify(None) - 'NULL' - >>> sqlify(True) - "'t'" - >>> sqlify(3) - '3' - """ - # because `1 == True and hash(1) == hash(True)` - # we have to do this the hard way... - - if obj is None: - return 'NULL' - elif obj is True: - return "'t'" - elif obj is False: - return "'f'" - elif isinstance(obj, long): - return str(obj) - elif datetime and isinstance(obj, datetime.datetime): - return repr(obj.isoformat()) - else: - if isinstance(obj, unicode): obj = obj.encode('utf8') - return repr(obj) - -def sqllist(lst): - """ - Converts the arguments for use in something like a WHERE clause. - - >>> sqllist(['a', 'b']) - 'a, b' - >>> sqllist('a') - 'a' - >>> sqllist(u'abc') - u'abc' - """ - if isinstance(lst, basestring): - return lst - else: - return ', '.join(lst) - -def sqlors(left, lst): - """ - `left is a SQL clause like `tablename.arg = ` - and `lst` is a list of values. Returns a reparam-style - pair featuring the SQL that ORs together the clause - for each item in the lst. - - >>> sqlors('foo = ', []) - - >>> sqlors('foo = ', [1]) - - >>> sqlors('foo = ', 1) - - >>> sqlors('foo = ', [1,2,3]) - - """ - if isinstance(lst, iters): - lst = list(lst) - ln = len(lst) - if ln == 0: - return SQLQuery("1=2") - if ln == 1: - lst = lst[0] - - if isinstance(lst, iters): - return SQLQuery(['('] + - sum([[left, sqlparam(x), ' OR '] for x in lst], []) + - ['1=2)'] - ) - else: - return left + sqlparam(lst) - -def sqlwhere(dictionary, grouping=' AND '): - """ - Converts a `dictionary` to an SQL WHERE clause `SQLQuery`. - - >>> sqlwhere({'cust_id': 2, 'order_id':3}) - - >>> sqlwhere({'cust_id': 2, 'order_id':3}, grouping=', ') - - >>> sqlwhere({'a': 'a', 'b': 'b'}).query() - 'a = %s AND b = %s' - """ - return SQLQuery.join([k + ' = ' + sqlparam(v) for k, v in dictionary.items()], grouping) - -def sqlquote(a): - """ - Ensures `a` is quoted properly for use in a SQL query. 
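Together, `sqlquote`, `sqlwhere` and `reparam` above produce `SQLQuery` objects that keep SQL text and parameter values separate until execution. A short hedged sketch of what they return (Python 2, assuming this module is importable as `web.db`, the same layout as upstream web.py):

    from web import db

    q = db.reparam("name = $name AND age > $age", dict(name='joe', age=30))
    print q.query()       # 'name = %s AND age > %s'
    print q.values()      # ['joe', 30]

    w = db.sqlwhere({'cust_id': 2, 'order_id': 3})
    print w.query()       # 'cust_id = %s AND order_id = %s' (key order may vary)

    # plain strings and sqlparam() concatenate into an SQLQuery as well
    q2 = "SELECT * FROM test WHERE name = " + db.sqlparam('joe')
    print q2.query(paramstyle='qmark')   # 'SELECT * FROM test WHERE name = ?'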
- - >>> 'WHERE x = ' + sqlquote(True) + ' AND y = ' + sqlquote(3) - - >>> 'WHERE x = ' + sqlquote(True) + ' AND y IN ' + sqlquote([2, 3]) - - """ - if isinstance(a, list): - return _sqllist(a) - else: - return sqlparam(a).sqlquery() - -class Transaction: - """Database transaction.""" - def __init__(self, ctx): - self.ctx = ctx - self.transaction_count = transaction_count = len(ctx.transactions) - - class transaction_engine: - """Transaction Engine used in top level transactions.""" - def do_transact(self): - ctx.commit(unload=False) - - def do_commit(self): - ctx.commit() - - def do_rollback(self): - ctx.rollback() - - class subtransaction_engine: - """Transaction Engine used in sub transactions.""" - def query(self, q): - db_cursor = ctx.db.cursor() - ctx.db_execute(db_cursor, SQLQuery(q % transaction_count)) - - def do_transact(self): - self.query('SAVEPOINT webpy_sp_%s') - - def do_commit(self): - self.query('RELEASE SAVEPOINT webpy_sp_%s') - - def do_rollback(self): - self.query('ROLLBACK TO SAVEPOINT webpy_sp_%s') - - class dummy_engine: - """Transaction Engine used instead of subtransaction_engine - when sub transactions are not supported.""" - do_transact = do_commit = do_rollback = lambda self: None - - if self.transaction_count: - # nested transactions are not supported in some databases - if self.ctx.get('ignore_nested_transactions'): - self.engine = dummy_engine() - else: - self.engine = subtransaction_engine() - else: - self.engine = transaction_engine() - - self.engine.do_transact() - self.ctx.transactions.append(self) - - def __enter__(self): - return self - - def __exit__(self, exctype, excvalue, traceback): - if exctype is not None: - self.rollback() - else: - self.commit() - - def commit(self): - if len(self.ctx.transactions) > self.transaction_count: - self.engine.do_commit() - self.ctx.transactions = self.ctx.transactions[:self.transaction_count] - - def rollback(self): - if len(self.ctx.transactions) > self.transaction_count: - self.engine.do_rollback() - self.ctx.transactions = self.ctx.transactions[:self.transaction_count] - -class DB: - """Database""" - def __init__(self, db_module, keywords): - """Creates a database. - """ - # some DB implementaions take optional paramater `driver` to use a specific driver modue - # but it should not be passed to connect - keywords.pop('driver', None) - - self.db_module = db_module - self.keywords = keywords - - self._ctx = threadeddict() - # flag to enable/disable printing queries - self.printing = config.get('debug_sql', config.get('debug', False)) - self.supports_multiple_insert = False - - try: - import DBUtils - # enable pooling if DBUtils module is available. - self.has_pooling = True - except ImportError: - self.has_pooling = False - - # Pooling can be disabled by passing pooling=False in the keywords. - self.has_pooling = self.keywords.pop('pooling', True) and self.has_pooling - - def _getctx(self): - if not self._ctx.get('db'): - self._load_context(self._ctx) - return self._ctx - ctx = property(_getctx) - - def _load_context(self, ctx): - ctx.dbq_count = 0 - ctx.transactions = [] # stack of transactions - - if self.has_pooling: - ctx.db = self._connect_with_pooling(self.keywords) - else: - ctx.db = self._connect(self.keywords) - ctx.db_execute = self._db_execute - - if not hasattr(ctx.db, 'commit'): - ctx.db.commit = lambda: None - - if not hasattr(ctx.db, 'rollback'): - ctx.db.rollback = lambda: None - - def commit(unload=True): - # do db commit and release the connection if pooling is enabled. 
- ctx.db.commit() - if unload and self.has_pooling: - self._unload_context(self._ctx) - - def rollback(): - # do db rollback and release the connection if pooling is enabled. - ctx.db.rollback() - if self.has_pooling: - self._unload_context(self._ctx) - - ctx.commit = commit - ctx.rollback = rollback - - def _unload_context(self, ctx): - del ctx.db - - def _connect(self, keywords): - return self.db_module.connect(**keywords) - - def _connect_with_pooling(self, keywords): - def get_pooled_db(): - from DBUtils import PooledDB - - # In DBUtils 0.9.3, `dbapi` argument is renamed as `creator` - # see Bug#122112 - - if PooledDB.__version__.split('.') < '0.9.3'.split('.'): - return PooledDB.PooledDB(dbapi=self.db_module, **keywords) - else: - return PooledDB.PooledDB(creator=self.db_module, **keywords) - - if getattr(self, '_pooleddb', None) is None: - self._pooleddb = get_pooled_db() - - return self._pooleddb.connection() - - def _db_cursor(self): - return self.ctx.db.cursor() - - def _param_marker(self): - """Returns parameter marker based on paramstyle attribute if this database.""" - style = getattr(self, 'paramstyle', 'pyformat') - - if style == 'qmark': - return '?' - elif style == 'numeric': - return ':1' - elif style in ['format', 'pyformat']: - return '%s' - raise UnknownParamstyle, style - - def _db_execute(self, cur, sql_query): - """executes an sql query""" - self.ctx.dbq_count += 1 - - try: - a = time.time() - query, params = self._process_query(sql_query) - out = cur.execute(query, params) - b = time.time() - except: - if self.printing: - print >> debug, 'ERR:', str(sql_query) - if self.ctx.transactions: - self.ctx.transactions[-1].rollback() - else: - self.ctx.rollback() - raise - - if self.printing: - print >> debug, '%s (%s): %s' % (round(b-a, 2), self.ctx.dbq_count, str(sql_query)) - return out - - def _process_query(self, sql_query): - """Takes the SQLQuery object and returns query string and parameters. - """ - paramstyle = getattr(self, 'paramstyle', 'pyformat') - query = sql_query.query(paramstyle) - params = sql_query.values() - return query, params - - def _where(self, where, vars): - if isinstance(where, (int, long)): - where = "id = " + sqlparam(where) - #@@@ for backward-compatibility - elif isinstance(where, (list, tuple)) and len(where) == 2: - where = SQLQuery(where[0], where[1]) - elif isinstance(where, dict): - where = self._where_dict(where) - elif isinstance(where, SQLQuery): - pass - else: - where = reparam(where, vars) - return where - - def _where_dict(self, where): - where_clauses = [] - for k, v in where.iteritems(): - if isinstance(v, list): - where_clauses.append(k + ' IN ' + sqlquote(v)) - else: - where_clauses.append(k + ' = ' + sqlquote(v)) - if where_clauses: - return SQLQuery.join(where_clauses, " AND ") - else: - return None - - def query(self, sql_query, vars=None, processed=False, _test=False): - """ - Execute SQL query `sql_query` using dictionary `vars` to interpolate it. - If `processed=True`, `vars` is a `reparam`-style list to use - instead of interpolating. 
- - >>> db = DB(None, {}) - >>> db.query("SELECT * FROM foo", _test=True) - - >>> db.query("SELECT * FROM foo WHERE x = $x", vars=dict(x='f'), _test=True) - - >>> db.query("SELECT * FROM foo WHERE x = " + sqlquote('f'), _test=True) - - """ - if vars is None: vars = {} - - if not processed and not isinstance(sql_query, SQLQuery): - sql_query = reparam(sql_query, vars) - - if _test: return sql_query - - db_cursor = self._db_cursor() - self._db_execute(db_cursor, sql_query) - - if db_cursor.description: - names = [x[0] for x in db_cursor.description] - def iterwrapper(): - row = db_cursor.fetchone() - while row: - yield storage(dict(zip(names, row))) - row = db_cursor.fetchone() - out = iterbetter(iterwrapper()) - out.__len__ = lambda: int(db_cursor.rowcount) - out.list = lambda: [storage(dict(zip(names, x))) \ - for x in db_cursor.fetchall()] - else: - out = db_cursor.rowcount - - if not self.ctx.transactions: - self.ctx.commit() - return out - - def select(self, tables, vars=None, what='*', where=None, order=None, group=None, - limit=None, offset=None, _test=False): - """ - Selects `what` from `tables` with clauses `where`, `order`, - `group`, `limit`, and `offset`. Uses vars to interpolate. - Otherwise, each clause can be a SQLQuery. - - >>> db = DB(None, {}) - >>> db.select('foo', _test=True) - - >>> db.select(['foo', 'bar'], where="foo.bar_id = bar.id", limit=5, _test=True) - - >>> db.select('foo', where={'id': 5}, _test=True) - - """ - if vars is None: vars = {} - sql_clauses = self.sql_clauses(what, tables, where, group, order, limit, offset) - clauses = [self.gen_clause(sql, val, vars) for sql, val in sql_clauses if val is not None] - qout = SQLQuery.join(clauses) - if _test: return qout - return self.query(qout, processed=True) - - def where(self, table, what='*', order=None, group=None, limit=None, - offset=None, _test=False, **kwargs): - """ - Selects from `table` where keys are equal to values in `kwargs`. - - >>> db = DB(None, {}) - >>> db.where('foo', bar_id=3, _test=True) - - >>> db.where('foo', source=2, crust='dewey', _test=True) - - >>> db.where('foo', _test=True) - - """ - where = self._where_dict(kwargs) - return self.select(table, what=what, order=order, - group=group, limit=limit, offset=offset, _test=_test, - where=where) - - def sql_clauses(self, what, tables, where, group, order, limit, offset): - return ( - ('SELECT', what), - ('FROM', sqllist(tables)), - ('WHERE', where), - ('GROUP BY', group), - ('ORDER BY', order), - ('LIMIT', limit), - ('OFFSET', offset)) - - def gen_clause(self, sql, val, vars): - if isinstance(val, (int, long)): - if sql == 'WHERE': - nout = 'id = ' + sqlquote(val) - else: - nout = SQLQuery(val) - #@@@ - elif isinstance(val, (list, tuple)) and len(val) == 2: - nout = SQLQuery(val[0], val[1]) # backwards-compatibility - elif sql == 'WHERE' and isinstance(val, dict): - nout = self._where_dict(val) - elif isinstance(val, SQLQuery): - nout = val - else: - nout = reparam(val, vars) - - def xjoin(a, b): - if a and b: return a + ' ' + b - else: return a or b - - return xjoin(sql, nout) - - def insert(self, tablename, seqname=None, _test=False, **values): - """ - Inserts `values` into `tablename`. Returns current sequence ID. - Set `seqname` to the ID if it's not the default, or to `False` - if there isn't one. 
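An end-to-end sketch of the `select`/`insert` API above, run against an in-memory SQLite database so that only the stdlib `sqlite3` driver is needed; it assumes upstream web.py (which this vendored copy mirrors) is importable as `web`:

    # Python 2, like the module itself.
    import web

    db = web.database(dbn='sqlite', db=':memory:')
    db.query("CREATE TABLE person (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)")

    with db.transaction():                     # handled by the Transaction class above
        db.insert('person', name='alice', age=30)
        db.insert('person', name='bob', age=25)

    for row in db.select('person', where='age > $min', vars={'min': 26}):
        print row.name, row.age                # -> alice 30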
- - >>> db = DB(None, {}) - >>> q = db.insert('foo', name='bob', age=2, created=SQLLiteral('NOW()'), _test=True) - >>> q - - >>> q.query() - 'INSERT INTO foo (age, name, created) VALUES (%s, %s, NOW())' - >>> q.values() - [2, 'bob'] - """ - def q(x): return "(" + x + ")" - - if values: - _keys = SQLQuery.join(values.keys(), ', ') - _values = SQLQuery.join([sqlparam(v) for v in values.values()], ', ') - sql_query = "INSERT INTO %s " % tablename + q(_keys) + ' VALUES ' + q(_values) - else: - sql_query = SQLQuery(self._get_insert_default_values_query(tablename)) - - if _test: return sql_query - - db_cursor = self._db_cursor() - if seqname is not False: - sql_query = self._process_insert_query(sql_query, tablename, seqname) - - if isinstance(sql_query, tuple): - # for some databases, a separate query has to be made to find - # the id of the inserted row. - q1, q2 = sql_query - self._db_execute(db_cursor, q1) - self._db_execute(db_cursor, q2) - else: - self._db_execute(db_cursor, sql_query) - - try: - out = db_cursor.fetchone()[0] - except Exception: - out = None - - if not self.ctx.transactions: - self.ctx.commit() - return out - - def _get_insert_default_values_query(self, table): - return "INSERT INTO %s DEFAULT VALUES" % table - - def multiple_insert(self, tablename, values, seqname=None, _test=False): - """ - Inserts multiple rows into `tablename`. The `values` must be a list of dictioanries, - one for each row to be inserted, each with the same set of keys. - Returns the list of ids of the inserted rows. - Set `seqname` to the ID if it's not the default, or to `False` - if there isn't one. - - >>> db = DB(None, {}) - >>> db.supports_multiple_insert = True - >>> values = [{"name": "foo", "email": "foo@example.com"}, {"name": "bar", "email": "bar@example.com"}] - >>> db.multiple_insert('person', values=values, _test=True) - - """ - if not values: - return [] - - if not self.supports_multiple_insert: - out = [self.insert(tablename, seqname=seqname, _test=_test, **v) for v in values] - if seqname is False: - return None - else: - return out - - keys = values[0].keys() - #@@ make sure all keys are valid - - for v in values: - if v.keys() != keys: - raise ValueError, 'Not all rows have the same keys' - - sql_query = SQLQuery('INSERT INTO %s (%s) VALUES ' % (tablename, ', '.join(keys))) - - for i, row in enumerate(values): - if i != 0: - sql_query.append(", ") - SQLQuery.join([SQLParam(row[k]) for k in keys], sep=", ", target=sql_query, prefix="(", suffix=")") - - if _test: return sql_query - - db_cursor = self._db_cursor() - if seqname is not False: - sql_query = self._process_insert_query(sql_query, tablename, seqname) - - if isinstance(sql_query, tuple): - # for some databases, a separate query has to be made to find - # the id of the inserted row. - q1, q2 = sql_query - self._db_execute(db_cursor, q1) - self._db_execute(db_cursor, q2) - else: - self._db_execute(db_cursor, sql_query) - - try: - out = db_cursor.fetchone()[0] - out = range(out-len(values)+1, out+1) - except Exception: - out = None - - if not self.ctx.transactions: - self.ctx.commit() - return out - - - def update(self, tables, where, vars=None, _test=False, **values): - """ - Update `tables` with clause `where` (interpolated using `vars`) - and setting `values`. - - >>> db = DB(None, {}) - >>> name = 'Joseph' - >>> q = db.update('foo', where='name = $name', name='bob', age=2, - ... 
created=SQLLiteral('NOW()'), vars=locals(), _test=True) - >>> q - - >>> q.query() - 'UPDATE foo SET age = %s, name = %s, created = NOW() WHERE name = %s' - >>> q.values() - [2, 'bob', 'Joseph'] - """ - if vars is None: vars = {} - where = self._where(where, vars) - - query = ( - "UPDATE " + sqllist(tables) + - " SET " + sqlwhere(values, ', ') + - " WHERE " + where) - - if _test: return query - - db_cursor = self._db_cursor() - self._db_execute(db_cursor, query) - if not self.ctx.transactions: - self.ctx.commit() - return db_cursor.rowcount - - def delete(self, table, where, using=None, vars=None, _test=False): - """ - Deletes from `table` with clauses `where` and `using`. - - >>> db = DB(None, {}) - >>> name = 'Joe' - >>> db.delete('foo', where='name = $name', vars=locals(), _test=True) - - """ - if vars is None: vars = {} - where = self._where(where, vars) - - q = 'DELETE FROM ' + table - if using: q += ' USING ' + sqllist(using) - if where: q += ' WHERE ' + where - - if _test: return q - - db_cursor = self._db_cursor() - self._db_execute(db_cursor, q) - if not self.ctx.transactions: - self.ctx.commit() - return db_cursor.rowcount - - def _process_insert_query(self, query, tablename, seqname): - return query - - def transaction(self): - """Start a transaction.""" - return Transaction(self.ctx) - -class PostgresDB(DB): - """Postgres driver.""" - def __init__(self, **keywords): - if 'pw' in keywords: - keywords['password'] = keywords.pop('pw') - - db_module = import_driver(["psycopg2", "psycopg", "pgdb"], preferred=keywords.pop('driver', None)) - if db_module.__name__ == "psycopg2": - import psycopg2.extensions - psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - if db_module.__name__ == "pgdb" and 'port' in keywords: - keywords["host"] += ":" + str(keywords.pop('port')) - - # if db is not provided postgres driver will take it from PGDATABASE environment variable - if 'db' in keywords: - keywords['database'] = keywords.pop('db') - - self.dbname = "postgres" - self.paramstyle = db_module.paramstyle - DB.__init__(self, db_module, keywords) - self.supports_multiple_insert = True - self._sequences = None - - def _process_insert_query(self, query, tablename, seqname): - if seqname is None: - # when seqname is not provided guess the seqname and make sure it exists - seqname = tablename + "_id_seq" - if seqname not in self._get_all_sequences(): - seqname = None - - if seqname: - query += "; SELECT currval('%s')" % seqname - - return query - - def _get_all_sequences(self): - """Query postgres to find names of all sequences used in this database.""" - if self._sequences is None: - q = "SELECT c.relname FROM pg_class c WHERE c.relkind = 'S'" - self._sequences = set([c.relname for c in self.query(q)]) - return self._sequences - - def _connect(self, keywords): - conn = DB._connect(self, keywords) - try: - conn.set_client_encoding('UTF8') - except AttributeError: - # fallback for pgdb driver - conn.cursor().execute("set client_encoding to 'UTF-8'") - return conn - - def _connect_with_pooling(self, keywords): - conn = DB._connect_with_pooling(self, keywords) - conn._con._con.set_client_encoding('UTF8') - return conn - -class MySQLDB(DB): - def __init__(self, **keywords): - import MySQLdb as db - if 'pw' in keywords: - keywords['passwd'] = keywords['pw'] - del keywords['pw'] - - if 'charset' not in keywords: - keywords['charset'] = 'utf8' - elif keywords['charset'] is None: - del keywords['charset'] - - self.paramstyle = db.paramstyle = 'pyformat' # it's both, like psycopg - self.dbname = 
"mysql" - DB.__init__(self, db, keywords) - self.supports_multiple_insert = True - - def _process_insert_query(self, query, tablename, seqname): - return query, SQLQuery('SELECT last_insert_id();') - - def _get_insert_default_values_query(self, table): - return "INSERT INTO %s () VALUES()" % table - -def import_driver(drivers, preferred=None): - """Import the first available driver or preferred driver. - """ - if preferred: - drivers = [preferred] - - for d in drivers: - try: - return __import__(d, None, None, ['x']) - except ImportError: - pass - raise ImportError("Unable to import " + " or ".join(drivers)) - -class SqliteDB(DB): - def __init__(self, **keywords): - db = import_driver(["sqlite3", "pysqlite2.dbapi2", "sqlite"], preferred=keywords.pop('driver', None)) - - if db.__name__ in ["sqlite3", "pysqlite2.dbapi2"]: - db.paramstyle = 'qmark' - - # sqlite driver doesn't create datatime objects for timestamp columns unless `detect_types` option is passed. - # It seems to be supported in sqlite3 and pysqlite2 drivers, not surte about sqlite. - keywords.setdefault('detect_types', db.PARSE_DECLTYPES) - - self.paramstyle = db.paramstyle - keywords['database'] = keywords.pop('db') - keywords['pooling'] = False # sqlite don't allows connections to be shared by threads - self.dbname = "sqlite" - DB.__init__(self, db, keywords) - - def _process_insert_query(self, query, tablename, seqname): - return query, SQLQuery('SELECT last_insert_rowid();') - - def query(self, *a, **kw): - out = DB.query(self, *a, **kw) - if isinstance(out, iterbetter): - del out.__len__ - return out - -class FirebirdDB(DB): - """Firebird Database. - """ - def __init__(self, **keywords): - try: - import kinterbasdb as db - except Exception: - db = None - pass - if 'pw' in keywords: - keywords['password'] = keywords.pop('pw') - keywords['database'] = keywords.pop('db') - - self.paramstyle = db.paramstyle - - DB.__init__(self, db, keywords) - - def delete(self, table, where=None, using=None, vars=None, _test=False): - # firebird doesn't support using clause - using=None - return DB.delete(self, table, where, using, vars, _test) - - def sql_clauses(self, what, tables, where, group, order, limit, offset): - return ( - ('SELECT', ''), - ('FIRST', limit), - ('SKIP', offset), - ('', what), - ('FROM', sqllist(tables)), - ('WHERE', where), - ('GROUP BY', group), - ('ORDER BY', order) - ) - -class MSSQLDB(DB): - def __init__(self, **keywords): - import pymssql as db - if 'pw' in keywords: - keywords['password'] = keywords.pop('pw') - keywords['database'] = keywords.pop('db') - self.dbname = "mssql" - DB.__init__(self, db, keywords) - - def _process_query(self, sql_query): - """Takes the SQLQuery object and returns query string and parameters. - """ - # MSSQLDB expects params to be a tuple. - # Overwriting the default implementation to convert params to tuple. - paramstyle = getattr(self, 'paramstyle', 'pyformat') - query = sql_query.query(paramstyle) - params = sql_query.values() - return query, tuple(params) - - def sql_clauses(self, what, tables, where, group, order, limit, offset): - return ( - ('SELECT', what), - ('TOP', limit), - ('FROM', sqllist(tables)), - ('WHERE', where), - ('GROUP BY', group), - ('ORDER BY', order), - ('OFFSET', offset)) - - def _test(self): - """Test LIMIT. - - Fake presence of pymssql module for running tests. - >>> import sys - >>> sys.modules['pymssql'] = sys.modules['sys'] - - MSSQL has TOP clause instead of LIMIT clause. 
- >>> db = MSSQLDB(db='test', user='joe', pw='secret') - >>> db.select('foo', limit=4, _test=True) - - """ - pass - -class OracleDB(DB): - def __init__(self, **keywords): - import cx_Oracle as db - if 'pw' in keywords: - keywords['password'] = keywords.pop('pw') - - #@@ TODO: use db.makedsn if host, port is specified - keywords['dsn'] = keywords.pop('db') - self.dbname = 'oracle' - db.paramstyle = 'numeric' - self.paramstyle = db.paramstyle - - # oracle doesn't support pooling - keywords.pop('pooling', None) - DB.__init__(self, db, keywords) - - def _process_insert_query(self, query, tablename, seqname): - if seqname is None: - # It is not possible to get seq name from table name in Oracle - return query - else: - return query + "; SELECT %s.currval FROM dual" % seqname - -def dburl2dict(url): - """ - Takes a URL to a database and parses it into an equivalent dictionary. - - >>> dburl2dict('postgres:///mygreatdb') - {'pw': None, 'dbn': 'postgres', 'db': 'mygreatdb', 'host': None, 'user': None, 'port': None} - >>> dburl2dict('postgres://james:day@serverfarm.example.net:5432/mygreatdb') - {'pw': 'day', 'dbn': 'postgres', 'db': 'mygreatdb', 'host': 'serverfarm.example.net', 'user': 'james', 'port': 5432} - >>> dburl2dict('postgres://james:day@serverfarm.example.net/mygreatdb') - {'pw': 'day', 'dbn': 'postgres', 'db': 'mygreatdb', 'host': 'serverfarm.example.net', 'user': 'james', 'port': None} - >>> dburl2dict('postgres://james:d%40y@serverfarm.example.net/mygreatdb') - {'pw': 'd@y', 'dbn': 'postgres', 'db': 'mygreatdb', 'host': 'serverfarm.example.net', 'user': 'james', 'port': None} - >>> dburl2dict('mysql://james:d%40y@serverfarm.example.net/mygreatdb') - {'pw': 'd@y', 'dbn': 'mysql', 'db': 'mygreatdb', 'host': 'serverfarm.example.net', 'user': 'james', 'port': None} - """ - parts = urlparse.urlparse(urllib.unquote(url)) - - return {'dbn': parts.scheme, - 'user': parts.username, - 'pw': parts.password, - 'db': parts.path[1:], - 'host': parts.hostname, - 'port': parts.port} - -_databases = {} -def database(dburl=None, **params): - """Creates appropriate database using params. - - Pooling will be enabled if DBUtils module is available. - Pooling can be disabled by passing pooling=False in params. - """ - if not dburl and not params: - dburl = os.environ['DATABASE_URL'] - if dburl: - params = dburl2dict(dburl) - dbn = params.pop('dbn') - if dbn in _databases: - return _databases[dbn](**params) - else: - raise UnknownDB, dbn - -def register_database(name, clazz): - """ - Register a database. - - >>> class LegacyDB(DB): - ... def __init__(self, **params): - ... pass - ... - >>> register_database('legacy', LegacyDB) - >>> db = database(dbn='legacy', db='test', user='joe', passwd='secret') - """ - _databases[name] = clazz - -register_database('mysql', MySQLDB) -register_database('postgres', PostgresDB) -register_database('sqlite', SqliteDB) -register_database('firebird', FirebirdDB) -register_database('mssql', MSSQLDB) -register_database('oracle', OracleDB) - -def _interpolate(format): - """ - Takes a format string and returns a list of 2-tuples of the form - (boolean, string) where boolean says whether string should be evaled - or not. 
- - from (public domain, Ka-Ping Yee) - """ - from tokenize import tokenprog - - def matchorfail(text, pos): - match = tokenprog.match(text, pos) - if match is None: - raise _ItplError(text, pos) - return match, match.end() - - namechars = "abcdefghijklmnopqrstuvwxyz" \ - "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"; - chunks = [] - pos = 0 - - while 1: - dollar = format.find("$", pos) - if dollar < 0: - break - nextchar = format[dollar + 1] - - if nextchar == "{": - chunks.append((0, format[pos:dollar])) - pos, level = dollar + 2, 1 - while level: - match, pos = matchorfail(format, pos) - tstart, tend = match.regs[3] - token = format[tstart:tend] - if token == "{": - level = level + 1 - elif token == "}": - level = level - 1 - chunks.append((1, format[dollar + 2:pos - 1])) - - elif nextchar in namechars: - chunks.append((0, format[pos:dollar])) - match, pos = matchorfail(format, dollar + 1) - while pos < len(format): - if format[pos] == "." and \ - pos + 1 < len(format) and format[pos + 1] in namechars: - match, pos = matchorfail(format, pos + 1) - elif format[pos] in "([": - pos, level = pos + 1, 1 - while level: - match, pos = matchorfail(format, pos) - tstart, tend = match.regs[3] - token = format[tstart:tend] - if token[0] in "([": - level = level + 1 - elif token[0] in ")]": - level = level - 1 - else: - break - chunks.append((1, format[dollar + 1:pos])) - else: - chunks.append((0, format[pos:dollar + 1])) - pos = dollar + 1 + (nextchar == "$") - - if pos < len(format): - chunks.append((0, format[pos:])) - return chunks - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/debugerror.py b/lib/nulib/python/nulib/ext/web/debugerror.py deleted file mode 100644 index 656d812..0000000 --- a/lib/nulib/python/nulib/ext/web/debugerror.py +++ /dev/null @@ -1,354 +0,0 @@ -""" -pretty debug errors -(part of web.py) - -portions adapted from Django -Copyright (c) 2005, the Lawrence Journal-World -Used under the modified BSD license: -http://www.xfree86.org/3.3.6/COPYRIGHT2.html#5 -""" - -__all__ = ["debugerror", "djangoerror", "emailerrors"] - -import sys, urlparse, pprint, traceback -from template import Template -from net import websafe -from utils import sendmail, safestr -import webapi as web - -import os, os.path -whereami = os.path.join(os.getcwd(), __file__) -whereami = os.path.sep.join(whereami.split(os.path.sep)[:-1]) -djangoerror_t = """\ -$def with (exception_type, exception_value, frames) - - - - - - $exception_type at $ctx.path - - - - - -$def dicttable (d, kls='req', id=None): - $ items = d and d.items() or [] - $items.sort() - $:dicttable_items(items, kls, id) - -$def dicttable_items(items, kls='req', id=None): - $if items: - - - $for k, v in items: - - -
[The body of the djangoerror_t template is not recoverable here: its HTML/CSS markup was stripped when the patch was rendered. The surviving outline: a header showing "$exception_type at $ctx.path" and $exception_value, a Python/Web request summary table, a "Traceback (innermost first)" section listing each frame's file, function, source context and local vars (via $:dicttable(frame.vars, ...)), "Response so far" (HEADERS, BODY), "Request information" (INPUT, COOKIES, META, ENVIRONMENT) tables, and a closing note that the page appears because web.config.debug is True.]
- - - -""" - -djangoerror_r = None - -def djangoerror(): - def _get_lines_from_file(filename, lineno, context_lines): - """ - Returns context_lines before and after lineno from file. - Returns (pre_context_lineno, pre_context, context_line, post_context). - """ - try: - source = open(filename).readlines() - lower_bound = max(0, lineno - context_lines) - upper_bound = lineno + context_lines - - pre_context = \ - [line.strip('\n') for line in source[lower_bound:lineno]] - context_line = source[lineno].strip('\n') - post_context = \ - [line.strip('\n') for line in source[lineno + 1:upper_bound]] - - return lower_bound, pre_context, context_line, post_context - except (OSError, IOError, IndexError): - return None, [], None, [] - - exception_type, exception_value, tback = sys.exc_info() - frames = [] - while tback is not None: - filename = tback.tb_frame.f_code.co_filename - function = tback.tb_frame.f_code.co_name - lineno = tback.tb_lineno - 1 - - # hack to get correct line number for templates - lineno += tback.tb_frame.f_locals.get("__lineoffset__", 0) - - pre_context_lineno, pre_context, context_line, post_context = \ - _get_lines_from_file(filename, lineno, 7) - - if '__hidetraceback__' not in tback.tb_frame.f_locals: - frames.append(web.storage({ - 'tback': tback, - 'filename': filename, - 'function': function, - 'lineno': lineno, - 'vars': tback.tb_frame.f_locals, - 'id': id(tback), - 'pre_context': pre_context, - 'context_line': context_line, - 'post_context': post_context, - 'pre_context_lineno': pre_context_lineno, - })) - tback = tback.tb_next - frames.reverse() - urljoin = urlparse.urljoin - def prettify(x): - try: - out = pprint.pformat(x) - except Exception, e: - out = '[could not display: <' + e.__class__.__name__ + \ - ': '+str(e)+'>]' - return out - - global djangoerror_r - if djangoerror_r is None: - djangoerror_r = Template(djangoerror_t, filename=__file__, filter=websafe) - - t = djangoerror_r - globals = {'ctx': web.ctx, 'web':web, 'dict':dict, 'str':str, 'prettify': prettify} - t.t.func_globals.update(globals) - return t(exception_type, exception_value, frames) - -def debugerror(): - """ - A replacement for `internalerror` that presents a nice page with lots - of debug information for the programmer. - - (Based on the beautiful 500 page from [Django](http://djangoproject.com/), - designed by [Wilson Miner](http://wilsonminer.com/).) - """ - return web._InternalError(djangoerror()) - -def emailerrors(to_address, olderror, from_address=None): - """ - Wraps the old `internalerror` handler (pass as `olderror`) to - additionally email all errors to `to_address`, to aid in - debugging production websites. - - Emails contain a normal text traceback as well as an - attachment containing the nice `debugerror` page. 
- """ - from_address = from_address or to_address - - def emailerrors_internal(): - error = olderror() - tb = sys.exc_info() - error_name = tb[0] - error_value = tb[1] - tb_txt = ''.join(traceback.format_exception(*tb)) - path = web.ctx.path - request = web.ctx.method + ' ' + web.ctx.home + web.ctx.fullpath - - message = "\n%s\n\n%s\n\n" % (request, tb_txt) - - sendmail( - "your buggy site <%s>" % from_address, - "the bugfixer <%s>" % to_address, - "bug: %(error_name)s: %(error_value)s (%(path)s)" % locals(), - message, - attachments=[ - dict(filename="bug.html", content=safestr(djangoerror())) - ], - ) - return error - - return emailerrors_internal - -if __name__ == "__main__": - urls = ( - '/', 'index' - ) - from application import application - app = application(urls, globals()) - app.internalerror = debugerror - - class index: - def GET(self): - thisdoesnotexist - - app.run() diff --git a/lib/nulib/python/nulib/ext/web/form.py b/lib/nulib/python/nulib/ext/web/form.py deleted file mode 100644 index f2f836c..0000000 --- a/lib/nulib/python/nulib/ext/web/form.py +++ /dev/null @@ -1,416 +0,0 @@ -""" -HTML forms -(part of web.py) -""" - -import copy, re -import webapi as web -import utils, net - -def attrget(obj, attr, value=None): - try: - if hasattr(obj, 'has_key') and obj.has_key(attr): - return obj[attr] - except TypeError: - # Handle the case where has_key takes different number of arguments. - # This is the case with Model objects on appengine. See #134 - pass - if hasattr(obj, attr): - return getattr(obj, attr) - return value - -class Form(object): - r""" - HTML form. - - >>> f = Form(Textbox("x")) - >>> f.render() - u'\n \n
' - """ - def __init__(self, *inputs, **kw): - self.inputs = inputs - self.valid = True - self.note = None - self.validators = kw.pop('validators', []) - - def __call__(self, x=None): - o = copy.deepcopy(self) - if x: o.validates(x) - return o - - def render(self): - out = '' - out += self.rendernote(self.note) - out += '\n' - - for i in self.inputs: - html = utils.safeunicode(i.pre) + i.render() + self.rendernote(i.note) + utils.safeunicode(i.post) - if i.is_hidden(): - out += ' \n' % (html) - else: - out += ' \n' % (i.id, net.websafe(i.description), html) - out += "
</table>
" - return out - - def render_css(self): - out = [] - out.append(self.rendernote(self.note)) - for i in self.inputs: - if not i.is_hidden(): - out.append('' % (i.id, net.websafe(i.description))) - out.append(i.pre) - out.append(i.render()) - out.append(self.rendernote(i.note)) - out.append(i.post) - out.append('\n') - return ''.join(out) - - def rendernote(self, note): - if note: return '%s' % net.websafe(note) - else: return "" - - def validates(self, source=None, _validate=True, **kw): - source = source or kw or web.input() - out = True - for i in self.inputs: - v = attrget(source, i.name) - if _validate: - out = i.validate(v) and out - else: - i.set_value(v) - if _validate: - out = out and self._validate(source) - self.valid = out - return out - - def _validate(self, value): - self.value = value - for v in self.validators: - if not v.valid(value): - self.note = v.msg - return False - return True - - def fill(self, source=None, **kw): - return self.validates(source, _validate=False, **kw) - - def __getitem__(self, i): - for x in self.inputs: - if x.name == i: return x - raise KeyError, i - - def __getattr__(self, name): - # don't interfere with deepcopy - inputs = self.__dict__.get('inputs') or [] - for x in inputs: - if x.name == name: return x - raise AttributeError, name - - def get(self, i, default=None): - try: - return self[i] - except KeyError: - return default - - def _get_d(self): #@@ should really be form.attr, no? - return utils.storage([(i.name, i.get_value()) for i in self.inputs]) - d = property(_get_d) - -class Input(object): - def __init__(self, name, *validators, **attrs): - self.name = name - self.validators = validators - self.attrs = attrs = AttributeList(attrs) - - self.description = attrs.pop('description', name) - self.value = attrs.pop('value', None) - self.pre = attrs.pop('pre', "") - self.post = attrs.pop('post', "") - self.note = None - - self.id = attrs.setdefault('id', self.get_default_id()) - - if 'class_' in attrs: - attrs['class'] = attrs['class_'] - del attrs['class_'] - - def is_hidden(self): - return False - - def get_type(self): - raise NotImplementedError - - def get_default_id(self): - return self.name - - def validate(self, value): - self.set_value(value) - - for v in self.validators: - if not v.valid(value): - self.note = v.msg - return False - return True - - def set_value(self, value): - self.value = value - - def get_value(self): - return self.value - - def render(self): - attrs = self.attrs.copy() - attrs['type'] = self.get_type() - if self.value is not None: - attrs['value'] = self.value - attrs['name'] = self.name - return '' % attrs - - def rendernote(self, note): - if note: return '%s' % net.websafe(note) - else: return "" - - def addatts(self): - # add leading space for backward-compatibility - return " " + str(self.attrs) - -class AttributeList(dict): - """List of atributes of input. - - >>> a = AttributeList(type='text', name='x', value=20) - >>> a - - """ - def copy(self): - return AttributeList(self) - - def __str__(self): - return " ".join(['%s="%s"' % (k, net.websafe(v)) for k, v in self.items()]) - - def __repr__(self): - return '' % repr(str(self)) - -class Textbox(Input): - """Textbox input. - - >>> Textbox(name='foo', value='bar').render() - u'' - >>> Textbox(name='foo', value=0).render() - u'' - """ - def get_type(self): - return 'text' - -class Password(Input): - """Password input. 
- - >>> Password(name='password', value='secret').render() - u'' - """ - - def get_type(self): - return 'password' - -class Textarea(Input): - """Textarea input. - - >>> Textarea(name='foo', value='bar').render() - u'' - """ - def render(self): - attrs = self.attrs.copy() - attrs['name'] = self.name - value = net.websafe(self.value or '') - return '' % (attrs, value) - -class Dropdown(Input): - r"""Dropdown/select input. - - >>> Dropdown(name='foo', args=['a', 'b', 'c'], value='b').render() - u'\n' - >>> Dropdown(name='foo', args=[('a', 'aa'), ('b', 'bb'), ('c', 'cc')], value='b').render() - u'\n' - """ - def __init__(self, name, args, *validators, **attrs): - self.args = args - super(Dropdown, self).__init__(name, *validators, **attrs) - - def render(self): - attrs = self.attrs.copy() - attrs['name'] = self.name - - x = '\n' - return x - - def _render_option(self, arg, indent=' '): - if isinstance(arg, (tuple, list)): - value, desc= arg - else: - value, desc = arg, arg - - value = utils.safestr(value) - if isinstance(self.value, (tuple, list)): - s_value = [utils.safestr(x) for x in self.value] - else: - s_value = utils.safestr(self.value) - - if s_value == value or (isinstance(s_value, list) and value in s_value): - select_p = ' selected="selected"' - else: - select_p = '' - return indent + '%s\n' % (select_p, net.websafe(value), net.websafe(desc)) - - -class GroupedDropdown(Dropdown): - r"""Grouped Dropdown/select input. - - >>> GroupedDropdown(name='car_type', args=(('Swedish Cars', ('Volvo', 'Saab')), ('German Cars', ('Mercedes', 'Audi'))), value='Audi').render() - u'\n' - >>> GroupedDropdown(name='car_type', args=(('Swedish Cars', (('v', 'Volvo'), ('s', 'Saab'))), ('German Cars', (('m', 'Mercedes'), ('a', 'Audi')))), value='a').render() - u'\n' - - """ - def __init__(self, name, args, *validators, **attrs): - self.args = args - super(Dropdown, self).__init__(name, *validators, **attrs) - - def render(self): - attrs = self.attrs.copy() - attrs['name'] = self.name - - x = '\n' - return x - -class Radio(Input): - def __init__(self, name, args, *validators, **attrs): - self.args = args - super(Radio, self).__init__(name, *validators, **attrs) - - def render(self): - x = '' - for arg in self.args: - if isinstance(arg, (tuple, list)): - value, desc= arg - else: - value, desc = arg, arg - attrs = self.attrs.copy() - attrs['name'] = self.name - attrs['type'] = 'radio' - attrs['value'] = value - if self.value == value: - attrs['checked'] = 'checked' - x += ' %s' % (attrs, net.websafe(desc)) - x += '' - return x - -class Checkbox(Input): - """Checkbox input. - - >>> Checkbox('foo', value='bar', checked=True).render() - u'' - >>> Checkbox('foo', value='bar').render() - u'' - >>> c = Checkbox('foo', value='bar') - >>> c.validate('on') - True - >>> c.render() - u'' - """ - def __init__(self, name, *validators, **attrs): - self.checked = attrs.pop('checked', False) - Input.__init__(self, name, *validators, **attrs) - - def get_default_id(self): - value = utils.safestr(self.value or "") - return self.name + '_' + value.replace(' ', '_') - - def render(self): - attrs = self.attrs.copy() - attrs['type'] = 'checkbox' - attrs['name'] = self.name - attrs['value'] = self.value - - if self.checked: - attrs['checked'] = 'checked' - return '' % attrs - - def set_value(self, value): - self.checked = bool(value) - - def get_value(self): - return self.checked - -class Button(Input): - """HTML Button. 
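A short hedged sketch of the validation half of the `Form`/`Input` API above (Python 2, assuming upstream web.py, which this vendored copy mirrors, is importable):

    from web import form

    login = form.Form(
        form.Textbox('username', form.notnull, description='User name'),
        form.Password('password', form.notnull),
    )

    f = login()                                   # Form.__call__ hands back a copy
    if f.validates({'username': 'joe', 'password': 'secret'}):
        print f.d.username                        # 'joe' -- validated values as storage
    else:
        print [(i.name, i.note) for i in f.inputs]  # per-field validation messages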
- - >>> Button("save").render() - u'' - >>> Button("action", value="save", html="Save Changes").render() - u'' - """ - def __init__(self, name, *validators, **attrs): - super(Button, self).__init__(name, *validators, **attrs) - self.description = "" - - def render(self): - attrs = self.attrs.copy() - attrs['name'] = self.name - if self.value is not None: - attrs['value'] = self.value - html = attrs.pop('html', None) or net.websafe(self.name) - return '' % (attrs, html) - -class Hidden(Input): - """Hidden Input. - - >>> Hidden(name='foo', value='bar').render() - u'' - """ - def is_hidden(self): - return True - - def get_type(self): - return 'hidden' - -class File(Input): - """File input. - - >>> File(name='f').render() - u'' - """ - def get_type(self): - return 'file' - -class Validator: - def __deepcopy__(self, memo): return copy.copy(self) - def __init__(self, msg, test, jstest=None): utils.autoassign(self, locals()) - def valid(self, value): - try: return self.test(value) - except: return False - -notnull = Validator("Required", bool) - -class regexp(Validator): - def __init__(self, rexp, msg): - self.rexp = re.compile(rexp) - self.msg = msg - - def valid(self, value): - return bool(self.rexp.match(value)) - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/http.py b/lib/nulib/python/nulib/ext/web/http.py deleted file mode 100644 index da67eba..0000000 --- a/lib/nulib/python/nulib/ext/web/http.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -HTTP Utilities -(from web.py) -""" - -__all__ = [ - "expires", "lastmodified", - "prefixurl", "modified", - "changequery", "url", - "profiler", -] - -import sys, os, threading, urllib, urlparse -try: import datetime -except ImportError: pass -import net, utils, webapi as web - -def prefixurl(base=''): - """ - Sorry, this function is really difficult to explain. - Maybe some other time. - """ - url = web.ctx.path.lstrip('/') - for i in xrange(url.count('/')): - base += '../' - if not base: - base = './' - return base - -def expires(delta): - """ - Outputs an `Expires` header for `delta` from now. - `delta` is a `timedelta` object or a number of seconds. - """ - if isinstance(delta, (int, long)): - delta = datetime.timedelta(seconds=delta) - date_obj = datetime.datetime.utcnow() + delta - web.header('Expires', net.httpdate(date_obj)) - -def lastmodified(date_obj): - """Outputs a `Last-Modified` header for `datetime`.""" - web.header('Last-Modified', net.httpdate(date_obj)) - -def modified(date=None, etag=None): - """ - Checks to see if the page has been modified since the version in the - requester's cache. - - When you publish pages, you can include `Last-Modified` and `ETag` - with the date the page was last modified and an opaque token for - the particular version, respectively. When readers reload the page, - the browser sends along the modification date and etag value for - the version it has in its cache. If the page hasn't changed, - the server can just return `304 Not Modified` and not have to - send the whole page again. - - This function takes the last-modified date `date` and the ETag `etag` - and checks the headers to see if they match. If they do, it returns - `True`, or otherwise it raises NotModified error. It also sets - `Last-Modified` and `ETag` output headers. 
- """ - try: - from __builtin__ import set - except ImportError: - # for python 2.3 - from sets import Set as set - - n = set([x.strip('" ') for x in web.ctx.env.get('HTTP_IF_NONE_MATCH', '').split(',')]) - m = net.parsehttpdate(web.ctx.env.get('HTTP_IF_MODIFIED_SINCE', '').split(';')[0]) - validate = False - if etag: - if '*' in n or etag in n: - validate = True - if date and m: - # we subtract a second because - # HTTP dates don't have sub-second precision - if date-datetime.timedelta(seconds=1) <= m: - validate = True - - if date: lastmodified(date) - if etag: web.header('ETag', '"' + etag + '"') - if validate: - raise web.notmodified() - else: - return True - -def urlencode(query, doseq=0): - """ - Same as urllib.urlencode, but supports unicode strings. - - >>> urlencode({'text':'foo bar'}) - 'text=foo+bar' - >>> urlencode({'x': [1, 2]}, doseq=True) - 'x=1&x=2' - """ - def convert(value, doseq=False): - if doseq and isinstance(value, list): - return [convert(v) for v in value] - else: - return utils.safestr(value) - - query = dict([(k, convert(v, doseq)) for k, v in query.items()]) - return urllib.urlencode(query, doseq=doseq) - -def changequery(query=None, **kw): - """ - Imagine you're at `/foo?a=1&b=2`. Then `changequery(a=3)` will return - `/foo?a=3&b=2` -- the same URL but with the arguments you requested - changed. - """ - if query is None: - query = web.rawinput(method='get') - for k, v in kw.iteritems(): - if v is None: - query.pop(k, None) - else: - query[k] = v - out = web.ctx.path - if query: - out += '?' + urlencode(query, doseq=True) - return out - -def url(path=None, doseq=False, **kw): - """ - Makes url by concatenating web.ctx.homepath and path and the - query string created using the arguments. - """ - if path is None: - path = web.ctx.path - if path.startswith("/"): - out = web.ctx.homepath + path - else: - out = path - - if kw: - out += '?' + urlencode(kw, doseq=doseq) - - return out - -def profiler(app): - """Outputs basic profiling information at the bottom of each response.""" - from utils import profile - def profile_internal(e, o): - out, result = profile(app)(e, o) - return list(out) + ['
<pre>' + net.websafe(result) + '</pre>
'] - return profile_internal - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/httpserver.py b/lib/nulib/python/nulib/ext/web/httpserver.py deleted file mode 100644 index 24aad6b..0000000 --- a/lib/nulib/python/nulib/ext/web/httpserver.py +++ /dev/null @@ -1,334 +0,0 @@ -__all__ = ["runsimple"] - -import sys, os -from os import path -import urlparse, posixpath, urllib -from SimpleHTTPServer import SimpleHTTPRequestHandler -import urllib -import posixpath - -import webapi as web -import net -import utils - -def runbasic(func, server_address=("0.0.0.0", 8080)): - """ - Runs a simple HTTP server hosting WSGI app `func`. The directory `static/` - is hosted statically. - - Based on [WsgiServer][ws] from [Colin Stewart][cs]. - - [ws]: http://www.owlfish.com/software/wsgiutils/documentation/wsgi-server-api.html - [cs]: http://www.owlfish.com/ - """ - # Copyright (c) 2004 Colin Stewart (http://www.owlfish.com/) - # Modified somewhat for simplicity - # Used under the modified BSD license: - # http://www.xfree86.org/3.3.6/COPYRIGHT2.html#5 - - import SimpleHTTPServer, SocketServer, BaseHTTPServer, urlparse - import socket, errno - import traceback - - class WSGIHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): - def run_wsgi_app(self): - protocol, host, path, parameters, query, fragment = \ - urlparse.urlparse('http://dummyhost%s' % self.path) - - # we only use path, query - env = {'wsgi.version': (1, 0) - ,'wsgi.url_scheme': 'http' - ,'wsgi.input': self.rfile - ,'wsgi.errors': sys.stderr - ,'wsgi.multithread': 1 - ,'wsgi.multiprocess': 0 - ,'wsgi.run_once': 0 - ,'REQUEST_METHOD': self.command - ,'REQUEST_URI': self.path - ,'PATH_INFO': path - ,'QUERY_STRING': query - ,'CONTENT_TYPE': self.headers.get('Content-Type', '') - ,'CONTENT_LENGTH': self.headers.get('Content-Length', '') - ,'REMOTE_ADDR': self.client_address[0] - ,'SERVER_NAME': self.server.server_address[0] - ,'SERVER_PORT': str(self.server.server_address[1]) - ,'SERVER_PROTOCOL': self.request_version - } - - for http_header, http_value in self.headers.items(): - env ['HTTP_%s' % http_header.replace('-', '_').upper()] = \ - http_value - - # Setup the state - self.wsgi_sent_headers = 0 - self.wsgi_headers = [] - - try: - # We have there environment, now invoke the application - result = self.server.app(env, self.wsgi_start_response) - try: - try: - for data in result: - if data: - self.wsgi_write_data(data) - finally: - if hasattr(result, 'close'): - result.close() - except socket.error, socket_err: - # Catch common network errors and suppress them - if (socket_err.args[0] in \ - (errno.ECONNABORTED, errno.EPIPE)): - return - except socket.timeout, socket_timeout: - return - except: - print >> web.debug, traceback.format_exc(), - - if (not self.wsgi_sent_headers): - # We must write out something! - self.wsgi_write_data(" ") - return - - do_POST = run_wsgi_app - do_PUT = run_wsgi_app - do_DELETE = run_wsgi_app - - def do_GET(self): - if self.path.startswith('/static/'): - SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) - else: - self.run_wsgi_app() - - def wsgi_start_response(self, response_status, response_headers, - exc_info=None): - if (self.wsgi_sent_headers): - raise Exception \ - ("Headers already sent and start_response called again!") - # Should really take a copy to avoid changes in the application.... 
- self.wsgi_headers = (response_status, response_headers) - return self.wsgi_write_data - - def wsgi_write_data(self, data): - if (not self.wsgi_sent_headers): - status, headers = self.wsgi_headers - # Need to send header prior to data - status_code = status[:status.find(' ')] - status_msg = status[status.find(' ') + 1:] - self.send_response(int(status_code), status_msg) - for header, value in headers: - self.send_header(header, value) - self.end_headers() - self.wsgi_sent_headers = 1 - # Send the data - self.wfile.write(data) - - class WSGIServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): - def __init__(self, func, server_address): - BaseHTTPServer.HTTPServer.__init__(self, - server_address, - WSGIHandler) - self.app = func - self.serverShuttingDown = 0 - - #print "http://%s:%d/" % server_address - WSGIServer(func, server_address).serve_forever() - -# The WSGIServer instance. -# Made global so that it can be stopped in embedded mode. -server = None - -def runsimple(func, server_address=("0.0.0.0", 8080)): - """ - Runs [CherryPy][cp] WSGI server hosting WSGI app `func`. - The directory `static/` is hosted statically. - - [cp]: http://www.cherrypy.org - """ - global server - func = StaticMiddleware(func) - func = LogMiddleware(func) - - server = WSGIServer(server_address, func) - - #if server.ssl_adapter: - # print "https://%s:%d/" % server_address - #else: - # print "http://%s:%d/" % server_address - - try: - server.start() - except (KeyboardInterrupt, SystemExit): - server.stop() - server = None - -def WSGIServer(server_address, wsgi_app): - """Creates CherryPy WSGI server listening at `server_address` to serve `wsgi_app`. - This function can be overwritten to customize the webserver or use a different webserver. - """ - import wsgiserver - - # Default values of wsgiserver.ssl_adapters uses cherrypy.wsgiserver - # prefix. Overwriting it make it work with web.wsgiserver. - wsgiserver.ssl_adapters = { - 'builtin': 'web.wsgiserver.ssl_builtin.BuiltinSSLAdapter', - 'pyopenssl': 'web.wsgiserver.ssl_pyopenssl.pyOpenSSLAdapter', - } - - server = wsgiserver.CherryPyWSGIServer(server_address, wsgi_app, server_name="localhost") - - def create_ssl_adapter(cert, key): - # wsgiserver tries to import submodules as cherrypy.wsgiserver.foo. - # That doesn't work as not it is web.wsgiserver. - # Patching sys.modules temporarily to make it work. - import types - cherrypy = types.ModuleType('cherrypy') - cherrypy.wsgiserver = wsgiserver - sys.modules['cherrypy'] = cherrypy - sys.modules['cherrypy.wsgiserver'] = wsgiserver - - from wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter - adapter = pyOpenSSLAdapter(cert, key) - - # We are done with our work. Cleanup the patches. 
- del sys.modules['cherrypy'] - del sys.modules['cherrypy.wsgiserver'] - - return adapter - - # SSL backward compatibility - if (server.ssl_adapter is None and - getattr(server, 'ssl_certificate', None) and - getattr(server, 'ssl_private_key', None)): - server.ssl_adapter = create_ssl_adapter(server.ssl_certificate, server.ssl_private_key) - - server.nodelay = not sys.platform.startswith('java') # TCP_NODELAY isn't supported on the JVM - return server - -class StaticApp(SimpleHTTPRequestHandler): - """WSGI application for serving static files.""" - def __init__(self, environ, start_response): - self.headers = [] - self.environ = environ - self.start_response = start_response - - def translate_path(self, path): - path = urlparse.urlparse(path)[2] - path = posixpath.normpath(urllib.unquote(path)) - words = path.split('/') - words = filter(None, words) - path = web.config.get('BASEDIR', os.getcwd()) - for word in words: - _, word = os.path.splitdrive(word) - _, word = os.path.split(word) - if word in (os.curdir, os.pardir): continue - path = os.path.join(path, word) - return path - - def send_response(self, status, msg=""): - self.status = str(status) + " " + msg - - def send_header(self, name, value): - self.headers.append((name, value)) - - def end_headers(self): - pass - - def log_message(*a): pass - - def __iter__(self): - environ = self.environ - - self.path = environ.get('PATH_INFO', '') - self.client_address = environ.get('REMOTE_ADDR','-'), \ - environ.get('REMOTE_PORT','-') - self.command = environ.get('REQUEST_METHOD', '-') - - from cStringIO import StringIO - self.wfile = StringIO() # for capturing error - - try: - path = self.translate_path(self.path) - etag = '"%s"' % os.path.getmtime(path) - client_etag = environ.get('HTTP_IF_NONE_MATCH') - self.send_header('ETag', etag) - if etag == client_etag: - self.send_response(304, "Not Modified") - self.start_response(self.status, self.headers) - raise StopIteration - except OSError: - pass # Probably a 404 - - f = self.send_head() - self.start_response(self.status, self.headers) - - if f: - block_size = 16 * 1024 - while True: - buf = f.read(block_size) - if not buf: - break - yield buf - f.close() - else: - value = self.wfile.getvalue() - yield value - -class StaticMiddleware: - """WSGI middleware for serving static files.""" - def __init__(self, app, prefix='/static/'): - self.app = app - self.prefix = prefix - - def __call__(self, environ, start_response): - path = environ.get('PATH_INFO', '') - path = self.normpath(path) - - if path.startswith(self.prefix): - return StaticApp(environ, start_response) - else: - return self.app(environ, start_response) - - def normpath(self, path): - path2 = posixpath.normpath(urllib.unquote(path)) - if path.endswith("/"): - path2 += "/" - return path2 - - -class LogMiddleware: - """WSGI middleware for logging the status.""" - def __init__(self, app): - self.app = app - self.format = '%s - - [%s] "%s %s %s" - %s' - - from BaseHTTPServer import BaseHTTPRequestHandler - import StringIO - f = StringIO.StringIO() - - class FakeSocket: - def makefile(self, *a): - return f - - # take log_date_time_string method from BaseHTTPRequestHandler - self.log_date_time_string = BaseHTTPRequestHandler(FakeSocket(), None, None).log_date_time_string - - def __call__(self, environ, start_response): - def xstart_response(status, response_headers, *args): - out = start_response(status, response_headers, *args) - self.log(status, environ) - return out - - return self.app(environ, xstart_response) - - def log(self, status, 
environ): - outfile = environ.get('wsgi.errors', web.debug) - req = environ.get('PATH_INFO', '_') - protocol = environ.get('ACTUAL_SERVER_PROTOCOL', '-') - method = environ.get('REQUEST_METHOD', '-') - host = "%s:%s" % (environ.get('REMOTE_ADDR','-'), - environ.get('REMOTE_PORT','-')) - - time = self.log_date_time_string() - - msg = self.format % (host, time, protocol, method, req, status) - print >> outfile, utils.safestr(msg) diff --git a/lib/nulib/python/nulib/ext/web/net.py b/lib/nulib/python/nulib/ext/web/net.py deleted file mode 100644 index b27fcb1..0000000 --- a/lib/nulib/python/nulib/ext/web/net.py +++ /dev/null @@ -1,244 +0,0 @@ -""" -Network Utilities -(from web.py) -""" - -__all__ = [ - "validipaddr", "validip6addr", "validipport", "validip", "validaddr", - "urlquote", - "httpdate", "parsehttpdate", - "htmlquote", "htmlunquote", "websafe", -] - -import urllib, time -try: import datetime -except ImportError: pass -import re -import socket - -def validip6addr(address): - """ - Returns True if `address` is a valid IPv6 address. - - >>> validip6addr('::') - True - >>> validip6addr('aaaa:bbbb:cccc:dddd::1') - True - >>> validip6addr('1:2:3:4:5:6:7:8:9:10') - False - >>> validip6addr('12:10') - False - """ - try: - socket.inet_pton(socket.AF_INET6, address) - except (socket.error, AttributeError): - return False - - return True - -def validipaddr(address): - """ - Returns True if `address` is a valid IPv4 address. - - >>> validipaddr('192.168.1.1') - True - >>> validipaddr('192.168.1.800') - False - >>> validipaddr('192.168.1') - False - """ - try: - octets = address.split('.') - if len(octets) != 4: - return False - for x in octets: - if not (0 <= int(x) <= 255): - return False - except ValueError: - return False - return True - -def validipport(port): - """ - Returns True if `port` is a valid IPv4 port. 
- - >>> validipport('9000') - True - >>> validipport('foo') - False - >>> validipport('1000000') - False - """ - try: - if not (0 <= int(port) <= 65535): - return False - except ValueError: - return False - return True - -def validip(ip, defaultaddr="0.0.0.0", defaultport=8080): - """ - Returns `(ip_address, port)` from string `ip_addr_port` - >>> validip('1.2.3.4') - ('1.2.3.4', 8080) - >>> validip('80') - ('0.0.0.0', 80) - >>> validip('192.168.0.1:85') - ('192.168.0.1', 85) - >>> validip('::') - ('::', 8080) - >>> validip('[::]:88') - ('::', 88) - >>> validip('[::1]:80') - ('::1', 80) - - """ - addr = defaultaddr - port = defaultport - - #Matt Boswell's code to check for ipv6 first - match = re.search(r'^\[([^]]+)\](?::(\d+))?$',ip) #check for [ipv6]:port - if match: - if validip6addr(match.group(1)): - if match.group(2): - if validipport(match.group(2)): return (match.group(1),int(match.group(2))) - else: - return (match.group(1),port) - else: - if validip6addr(ip): return (ip,port) - #end ipv6 code - - ip = ip.split(":", 1) - if len(ip) == 1: - if not ip[0]: - pass - elif validipaddr(ip[0]): - addr = ip[0] - elif validipport(ip[0]): - port = int(ip[0]) - else: - raise ValueError, ':'.join(ip) + ' is not a valid IP address/port' - elif len(ip) == 2: - addr, port = ip - if not validipaddr(addr) and validipport(port): - raise ValueError, ':'.join(ip) + ' is not a valid IP address/port' - port = int(port) - else: - raise ValueError, ':'.join(ip) + ' is not a valid IP address/port' - return (addr, port) - -def validaddr(string_): - """ - Returns either (ip_address, port) or "/path/to/socket" from string_ - - >>> validaddr('/path/to/socket') - '/path/to/socket' - >>> validaddr('8000') - ('0.0.0.0', 8000) - >>> validaddr('127.0.0.1') - ('127.0.0.1', 8080) - >>> validaddr('127.0.0.1:8000') - ('127.0.0.1', 8000) - >>> validip('[::1]:80') - ('::1', 80) - >>> validaddr('fff') - Traceback (most recent call last): - ... - ValueError: fff is not a valid IP address/port - """ - if '/' in string_: - return string_ - else: - return validip(string_) - -def urlquote(val): - """ - Quotes a string for use in a URL. - - >>> urlquote('://?f=1&j=1') - '%3A//%3Ff%3D1%26j%3D1' - >>> urlquote(None) - '' - >>> urlquote(u'\u203d') - '%E2%80%BD' - """ - if val is None: return '' - if not isinstance(val, unicode): val = str(val) - else: val = val.encode('utf-8') - return urllib.quote(val) - -def httpdate(date_obj): - """ - Formats a datetime object for use in HTTP headers. - - >>> import datetime - >>> httpdate(datetime.datetime(1970, 1, 1, 1, 1, 1)) - 'Thu, 01 Jan 1970 01:01:01 GMT' - """ - return date_obj.strftime("%a, %d %b %Y %H:%M:%S GMT") - -def parsehttpdate(string_): - """ - Parses an HTTP date into a datetime object. - - >>> parsehttpdate('Thu, 01 Jan 1970 01:01:01 GMT') - datetime.datetime(1970, 1, 1, 1, 1, 1) - """ - try: - t = time.strptime(string_, "%a, %d %b %Y %H:%M:%S %Z") - except ValueError: - return None - return datetime.datetime(*t[:6]) - -def htmlquote(text): - r""" - Encodes `text` for raw use in HTML. - - >>> htmlquote(u"<'&\">") - u'<'&">' - """ - text = text.replace(u"&", u"&") # Must be done first! - text = text.replace(u"<", u"<") - text = text.replace(u">", u">") - text = text.replace(u"'", u"'") - text = text.replace(u'"', u""") - return text - -def htmlunquote(text): - r""" - Decodes `text` that's HTML quoted. 
- - >>> htmlunquote(u'<'&">') - u'<\'&">' - """ - text = text.replace(u""", u'"') - text = text.replace(u"'", u"'") - text = text.replace(u">", u">") - text = text.replace(u"<", u"<") - text = text.replace(u"&", u"&") # Must be done last! - return text - -def websafe(val): - r"""Converts `val` so that it is safe for use in Unicode HTML. - - >>> websafe("<'&\">") - u'<'&">' - >>> websafe(None) - u'' - >>> websafe(u'\u203d') - u'\u203d' - >>> websafe('\xe2\x80\xbd') - u'\u203d' - """ - if val is None: - return u'' - elif isinstance(val, str): - val = val.decode('utf-8') - elif not isinstance(val, unicode): - val = unicode(val) - - return htmlquote(val) - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/python23.py b/lib/nulib/python/nulib/ext/web/python23.py deleted file mode 100644 index dfb331a..0000000 --- a/lib/nulib/python/nulib/ext/web/python23.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Python 2.3 compatabilty""" -import threading - -class threadlocal(object): - """Implementation of threading.local for python2.3. - """ - def __getattribute__(self, name): - if name == "__dict__": - return threadlocal._getd(self) - else: - try: - return object.__getattribute__(self, name) - except AttributeError: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError, name - - def __setattr__(self, name, value): - self.__dict__[name] = value - - def __delattr__(self, name): - try: - del self.__dict__[name] - except KeyError: - raise AttributeError, name - - def _getd(self): - t = threading.currentThread() - if not hasattr(t, '_d'): - # using __dict__ of thread as thread local storage - t._d = {} - - _id = id(self) - # there could be multiple instances of threadlocal. - # use id(self) as key - if _id not in t._d: - t._d[_id] = {} - return t._d[_id] - -if __name__ == '__main__': - d = threadlocal() - d.x = 1 - print d.__dict__ - print d.x - \ No newline at end of file diff --git a/lib/nulib/python/nulib/ext/web/session.py b/lib/nulib/python/nulib/ext/web/session.py deleted file mode 100644 index a95c9d5..0000000 --- a/lib/nulib/python/nulib/ext/web/session.py +++ /dev/null @@ -1,358 +0,0 @@ -""" -Session Management -(from web.py) -""" - -import os, time, datetime, random, base64 -import os.path -from copy import deepcopy -try: - import cPickle as pickle -except ImportError: - import pickle -try: - import hashlib - sha1 = hashlib.sha1 -except ImportError: - import sha - sha1 = sha.new - -import utils -import webapi as web - -__all__ = [ - 'Session', 'SessionExpired', - 'Store', 'DiskStore', 'DBStore', -] - -web.config.session_parameters = utils.storage({ - 'cookie_name': 'webpy_session_id', - 'cookie_domain': None, - 'cookie_path' : None, - 'timeout': 86400, #24 * 60 * 60, # 24 hours in seconds - 'ignore_expiry': True, - 'ignore_change_ip': True, - 'secret_key': 'fLjUfxqXtfNoIldA0A0J', - 'expired_message': 'Session expired', - 'httponly': True, - 'secure': False -}) - -class SessionExpired(web.HTTPError): - def __init__(self, message): - web.HTTPError.__init__(self, '200 OK', {}, data=message) - -class Session(object): - """Session management for web.py - """ - __slots__ = [ - "store", "_initializer", "_last_cleanup_time", "_config", "_data", - "__getitem__", "__setitem__", "__delitem__" - ] - - def __init__(self, app, store, initializer=None): - self.store = store - self._initializer = initializer - self._last_cleanup_time = 0 - self._config = utils.storage(web.config.session_parameters) - self._data = utils.threadeddict() - - 
self.__getitem__ = self._data.__getitem__ - self.__setitem__ = self._data.__setitem__ - self.__delitem__ = self._data.__delitem__ - - if app: - app.add_processor(self._processor) - - def __contains__(self, name): - return name in self._data - - def __getattr__(self, name): - return getattr(self._data, name) - - def __setattr__(self, name, value): - if name in self.__slots__: - object.__setattr__(self, name, value) - else: - setattr(self._data, name, value) - - def __delattr__(self, name): - delattr(self._data, name) - - def _processor(self, handler): - """Application processor to setup session for every request""" - self._cleanup() - self._load() - - try: - return handler() - finally: - self._save() - - def _load(self): - """Load the session from the store, by the id from cookie""" - cookie_name = self._config.cookie_name - cookie_domain = self._config.cookie_domain - cookie_path = self._config.cookie_path - httponly = self._config.httponly - self.session_id = web.cookies().get(cookie_name) - - # protection against session_id tampering - if self.session_id and not self._valid_session_id(self.session_id): - self.session_id = None - - self._check_expiry() - if self.session_id: - d = self.store[self.session_id] - self.update(d) - self._validate_ip() - - if not self.session_id: - self.session_id = self._generate_session_id() - - if self._initializer: - if isinstance(self._initializer, dict): - self.update(deepcopy(self._initializer)) - elif hasattr(self._initializer, '__call__'): - self._initializer() - - self.ip = web.ctx.ip - - def _check_expiry(self): - # check for expiry - if self.session_id and self.session_id not in self.store: - if self._config.ignore_expiry: - self.session_id = None - else: - return self.expired() - - def _validate_ip(self): - # check for change of IP - if self.session_id and self.get('ip', None) != web.ctx.ip: - if not self._config.ignore_change_ip: - return self.expired() - - def _save(self): - if not self.get('_killed'): - self._setcookie(self.session_id) - self.store[self.session_id] = dict(self._data) - else: - self._setcookie(self.session_id, expires=-1) - - def _setcookie(self, session_id, expires='', **kw): - cookie_name = self._config.cookie_name - cookie_domain = self._config.cookie_domain - cookie_path = self._config.cookie_path - httponly = self._config.httponly - secure = self._config.secure - web.setcookie(cookie_name, session_id, expires=expires, domain=cookie_domain, httponly=httponly, secure=secure, path=cookie_path) - - def _generate_session_id(self): - """Generate a random id for session""" - - while True: - rand = os.urandom(16) - now = time.time() - secret_key = self._config.secret_key - session_id = sha1("%s%s%s%s" %(rand, now, utils.safestr(web.ctx.ip), secret_key)) - session_id = session_id.hexdigest() - if session_id not in self.store: - break - return session_id - - def _valid_session_id(self, session_id): - rx = utils.re_compile('^[0-9a-fA-F]+$') - return rx.match(session_id) - - def _cleanup(self): - """Cleanup the stored sessions""" - current_time = time.time() - timeout = self._config.timeout - if current_time - self._last_cleanup_time > timeout: - self.store.cleanup(timeout) - self._last_cleanup_time = current_time - - def expired(self): - """Called when an expired session is atime""" - self._killed = True - self._save() - raise SessionExpired(self._config.expired_message) - - def kill(self): - """Kill the session, make it no longer available""" - del self.store[self.session_id] - self._killed = True - -class Store: - """Base class for 
session stores""" - - def __contains__(self, key): - raise NotImplementedError - - def __getitem__(self, key): - raise NotImplementedError - - def __setitem__(self, key, value): - raise NotImplementedError - - def cleanup(self, timeout): - """removes all the expired sessions""" - raise NotImplementedError - - def encode(self, session_dict): - """encodes session dict as a string""" - pickled = pickle.dumps(session_dict) - return base64.encodestring(pickled) - - def decode(self, session_data): - """decodes the data to get back the session dict """ - pickled = base64.decodestring(session_data) - return pickle.loads(pickled) - -class DiskStore(Store): - """ - Store for saving a session on disk. - - >>> import tempfile - >>> root = tempfile.mkdtemp() - >>> s = DiskStore(root) - >>> s['a'] = 'foo' - >>> s['a'] - 'foo' - >>> time.sleep(0.01) - >>> s.cleanup(0.01) - >>> s['a'] - Traceback (most recent call last): - ... - KeyError: 'a' - """ - def __init__(self, root): - # if the storage root doesn't exists, create it. - if not os.path.exists(root): - os.makedirs( - os.path.abspath(root) - ) - self.root = root - - def _get_path(self, key): - if os.path.sep in key: - raise ValueError, "Bad key: %s" % repr(key) - return os.path.join(self.root, key) - - def __contains__(self, key): - path = self._get_path(key) - return os.path.exists(path) - - def __getitem__(self, key): - path = self._get_path(key) - if os.path.exists(path): - pickled = open(path).read() - return self.decode(pickled) - else: - raise KeyError, key - - def __setitem__(self, key, value): - path = self._get_path(key) - pickled = self.encode(value) - try: - f = open(path, 'w') - try: - f.write(pickled) - finally: - f.close() - except IOError: - pass - - def __delitem__(self, key): - path = self._get_path(key) - if os.path.exists(path): - os.remove(path) - - def cleanup(self, timeout): - now = time.time() - for f in os.listdir(self.root): - path = self._get_path(f) - atime = os.stat(path).st_atime - if now - atime > timeout : - os.remove(path) - -class DBStore(Store): - """Store for saving a session in database - Needs a table with the following columns: - - session_id CHAR(128) UNIQUE NOT NULL, - atime DATETIME NOT NULL default current_timestamp, - data TEXT - """ - def __init__(self, db, table_name): - self.db = db - self.table = table_name - - def __contains__(self, key): - data = self.db.select(self.table, where="session_id=$key", vars=locals()) - return bool(list(data)) - - def __getitem__(self, key): - now = datetime.datetime.now() - try: - s = self.db.select(self.table, where="session_id=$key", vars=locals())[0] - self.db.update(self.table, where="session_id=$key", atime=now, vars=locals()) - except IndexError: - raise KeyError - else: - return self.decode(s.data) - - def __setitem__(self, key, value): - pickled = self.encode(value) - now = datetime.datetime.now() - if key in self: - self.db.update(self.table, where="session_id=$key", data=pickled,atime=now, vars=locals()) - else: - self.db.insert(self.table, False, session_id=key, atime=now, data=pickled ) - - def __delitem__(self, key): - self.db.delete(self.table, where="session_id=$key", vars=locals()) - - def cleanup(self, timeout): - timeout = datetime.timedelta(timeout/(24.0*60*60)) #timedelta takes numdays as arg - last_allowed_time = datetime.datetime.now() - timeout - self.db.delete(self.table, where="$last_allowed_time > atime", vars=locals()) - -class ShelfStore: - """Store for saving session using `shelve` module. 
- - import shelve - store = ShelfStore(shelve.open('session.shelf')) - - XXX: is shelve thread-safe? - """ - def __init__(self, shelf): - self.shelf = shelf - - def __contains__(self, key): - return key in self.shelf - - def __getitem__(self, key): - atime, v = self.shelf[key] - self[key] = v # update atime - return v - - def __setitem__(self, key, value): - self.shelf[key] = time.time(), value - - def __delitem__(self, key): - try: - del self.shelf[key] - except KeyError: - pass - - def cleanup(self, timeout): - now = time.time() - for k in self.shelf.keys(): - atime, v = self.shelf[k] - if now - atime > timeout : - del self[k] - -if __name__ == '__main__' : - import doctest - doctest.testmod() diff --git a/lib/nulib/python/nulib/ext/web/template.py b/lib/nulib/python/nulib/ext/web/template.py deleted file mode 100644 index ff7d4ef..0000000 --- a/lib/nulib/python/nulib/ext/web/template.py +++ /dev/null @@ -1,1534 +0,0 @@ -""" -simple, elegant templating -(part of web.py) - -Template design: - -Template string is split into tokens and the tokens are combined into nodes. -Parse tree is a nodelist. TextNode and ExpressionNode are simple nodes and -for-loop, if-loop etc are block nodes, which contain multiple child nodes. - -Each node can emit some python string. python string emitted by the -root node is validated for safeeval and executed using python in the given environment. - -Enough care is taken to make sure the generated code and the template has line to line match, -so that the error messages can point to exact line number in template. (It doesn't work in some cases still.) - -Grammar: - - template -> defwith sections - defwith -> '$def with (' arguments ')' | '' - sections -> section* - section -> block | assignment | line - - assignment -> '$ ' - line -> (text|expr)* - text -> - expr -> '$' pyexpr | '$(' pyexpr ')' | '${' pyexpr '}' - pyexpr -> -""" - -__all__ = [ - "Template", - "Render", "render", "frender", - "ParseError", "SecurityError", - "test" -] - -import tokenize -import os -import sys -import glob -import re -from UserDict import DictMixin -import warnings - -from utils import storage, safeunicode, safestr, re_compile -from webapi import config -from net import websafe - -def splitline(text): - r""" - Splits the given text at newline. - - >>> splitline('foo\nbar') - ('foo\n', 'bar') - >>> splitline('foo') - ('foo', '') - >>> splitline('') - ('', '') - """ - index = text.find('\n') + 1 - if index: - return text[:index], text[index:] - else: - return text, '' - -class Parser: - """Parser Base. - """ - def __init__(self): - self.statement_nodes = STATEMENT_NODES - self.keywords = KEYWORDS - - def parse(self, text, name="