From e840468a1bd489d7961b797336e9e412812a98a0 Mon Sep 17 00:00:00 2001 From: Karl Chen Date: Fri, 18 Jul 2003 21:43:12 +0000 Subject: [PATCH] test cases and make_project svn path=/trunk/boinc/; revision=1734 --- Makefile.am | 5 + Makefile.in | 19 +- Makefile.incl | 2 +- api/Makefile.in | 4 +- apps/Makefile.in | 4 +- client/Makefile.in | 4 +- configure | 13 +- configure.ac | 7 +- db/Makefile.in | 2 +- db/constraints.sql | 1 - db/drop.sql | 2 - db/schema.sql | 1 - lib/Makefile.in | 4 +- py/Makefile.am | 10 + py/Makefile.in | 324 ++++++++++++ py/boinc.py | 585 ++++++++++++++++++++++ py/db_def_to_py | 16 + py/version.py.in | 18 + sched/Makefile.am | 7 +- sched/Makefile.in | 11 +- test/.cvsignore | 1 - test/Makefile.am | 13 +- test/Makefile.in | 35 +- test/boinc.py | 1117 ------------------------------------------ test/test_1sec.py | 1 - test/test_backend.py | 1 - test/test_concat.py | 16 +- test/test_sanity.py | 17 +- test/test_uc.py | 20 +- test/testbase.py | 610 +++++++++++++++++++++++ tools/Makefile.am | 4 + tools/Makefile.in | 8 +- tools/make_project | 159 ++++++ 33 files changed, 1820 insertions(+), 1221 deletions(-) delete mode 100644 db/drop.sql create mode 100644 py/Makefile.am create mode 100644 py/Makefile.in create mode 100644 py/boinc.py create mode 100755 py/db_def_to_py create mode 100644 py/version.py.in delete mode 100644 test/boinc.py create mode 100644 test/testbase.py create mode 100755 tools/make_project diff --git a/Makefile.am b/Makefile.am index 92c605c27a..f25f0d3b72 100644 --- a/Makefile.am +++ b/Makefile.am @@ -13,6 +13,11 @@ EXTRA_DIST = \ stripchart \ INSTALL +py html_user html_ops: + [ "$srcdir" != . ] && ln -s $srcdir/$@ $@ + +all-local: html_user html_ops + # the perl line below gets rid of '\r' characters because MSVC barfs at them. dist-hook: diff --git a/Makefile.in b/Makefile.in index 5992d576f9..62ebd80257 100644 --- a/Makefile.in +++ b/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -343,7 +343,7 @@ distcleancheck_listfiles = find . 
-type f -print distdir: $(DISTFILES) $(am__remove_distdir) mkdir $(distdir) - $(mkinstalldirs) $(distdir)/test + $(mkinstalldirs) $(distdir)/py $(distdir)/test @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ list='$(DISTFILES)'; for file in $$list; do \ @@ -464,7 +464,7 @@ distcleancheck: distclean exit 1; } >&2 check-am: all-am check: check-recursive -all-am: Makefile config.h +all-am: Makefile config.h all-local installdirs: installdirs-recursive installdirs-am: @@ -539,10 +539,10 @@ uninstall-am: uninstall-info-am uninstall-info: uninstall-info-recursive -.PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am check check-am clean \ - clean-generic clean-recursive ctags ctags-recursive dist \ - dist-all dist-gzip dist-zip distcheck distclean \ - distclean-generic distclean-hdr distclean-recursive \ +.PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am all-local check \ + check-am clean clean-generic clean-recursive ctags \ + ctags-recursive dist dist-all dist-gzip dist-zip distcheck \ + distclean distclean-generic distclean-hdr distclean-recursive \ distclean-tags distcleancheck distdir distuninstallcheck dvi \ dvi-am dvi-recursive info info-am info-recursive install \ install-am install-data install-data-am install-data-recursive \ @@ -557,6 +557,11 @@ uninstall-info: uninstall-info-recursive uninstall-info-am uninstall-info-recursive uninstall-recursive +py html_user html_ops: + [ "$srcdir" != . ] && ln -s $srcdir/$@ $@ + +all-local: html_user html_ops + # the perl line below gets rid of '\r' characters because MSVC barfs at them. dist-hook: diff --git a/Makefile.incl b/Makefile.incl index 005e186bb2..19160ce3ab 100644 --- a/Makefile.incl +++ b/Makefile.incl @@ -27,7 +27,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before # programs linking to it: diff --git a/api/Makefile.in b/api/Makefile.in index eaefd94776..607530cb24 100644 --- a/api/Makefile.in +++ b/api/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before diff --git a/apps/Makefile.in b/apps/Makefile.in index 0f5b874d2e..b4f93f0072 100644 --- a/apps/Makefile.in +++ b/apps/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro 
gets compiled before diff --git a/client/Makefile.in b/client/Makefile.in index a5d7b4e962..5a83e6ae97 100644 --- a/client/Makefile.in +++ b/client/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ # by default, "-static -static-libgcc" on linux. STATIC_FLAGS = @STATIC_FLAGS@ @@ -165,7 +165,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before diff --git a/configure b/configure index ebb79c5d3e..403cce5187 100755 --- a/configure +++ b/configure @@ -330,7 +330,7 @@ ac_includes_default="\ # include #endif" -ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS build build_cpu build_vendor build_os host host_cpu host_vendor host_os target target_cpu target_vendor target_os INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA CYGPATH_W PACKAGE VERSION ACLOCAL AUTOCONF AUTOMAKE AUTOHEADER MAKEINFO AMTAR install_sh STRIP ac_ct_STRIP INSTALL_STRIP_PROGRAM AWK SET_MAKE am__leading_dot MAJOR_VERSION MINOR_VERSION SOURCE_TOP_DIR MAINTAINER_MODE_TRUE MAINTAINER_MODE_FALSE MAINT CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC EXEEXT OBJEXT DEPDIR am__include am__quote AMDEP_TRUE AMDEP_FALSE AMDEPBACKSLASH CCDEPMODE am__fastdepCC_TRUE am__fastdepCC_FALSE CXX CXXFLAGS ac_ct_CXX CXXDEPMODE am__fastdepCXX_TRUE am__fastdepCXX_FALSE CPP RANLIB ac_ct_RANLIB EGREP CLIENT_BIN_FILENAME STATIC_FLAGS LIBOBJS LTLIBOBJS' +ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS build build_cpu build_vendor build_os host host_cpu host_vendor host_os target target_cpu target_vendor target_os INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA CYGPATH_W PACKAGE VERSION ACLOCAL AUTOCONF AUTOMAKE AUTOHEADER MAKEINFO AMTAR install_sh STRIP ac_ct_STRIP INSTALL_STRIP_PROGRAM AWK SET_MAKE am__leading_dot MAJOR_VERSION MINOR_VERSION BUILD_TOP_DIR MAINTAINER_MODE_TRUE MAINTAINER_MODE_FALSE MAINT CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC EXEEXT OBJEXT DEPDIR am__include am__quote AMDEP_TRUE AMDEP_FALSE AMDEPBACKSLASH CCDEPMODE am__fastdepCC_TRUE am__fastdepCC_FALSE CXX CXXFLAGS ac_ct_CXX CXXDEPMODE am__fastdepCXX_TRUE am__fastdepCXX_FALSE CPP RANLIB ac_ct_RANLIB EGREP CLIENT_BIN_FILENAME STATIC_FLAGS LIBOBJS LTLIBOBJS' ac_subst_files='' # Initialize some variables set by options. @@ -1845,7 +1845,7 @@ cat >>confdefs.h <<_ACEOF _ACEOF -SOURCE_TOP_DIR=`pwd` +BUILD_TOP_DIR=`pwd` echo "$as_me:$LINENO: checking whether to enable maintainer-specific portions of Makefiles" >&5 @@ -6658,12 +6658,12 @@ CLIENT_BIN_FILENAME=boinc_${MAJOR_VERSION}.${MINOR_VERSION}_$host${EXEEXT} # by default, create static binaries on linux. 
if [ "$target_os" = "linux-gnu" ]; then - STATIC_FLAGS="-static -static-libgcc" + STATIC_FLAGS="-static" fi echo "checking static flags... ${STATIC_FLAGS:-(none)}" - ac_config_files="$ac_config_files RSAEuro/source/Makefile RSAEuro/Makefile api/Makefile apps/Makefile client/Makefile db/Makefile lib/Makefile sched/Makefile tools/Makefile test/Makefile test/version.inc test/version.py Makefile" + ac_config_files="$ac_config_files RSAEuro/source/Makefile RSAEuro/Makefile api/Makefile apps/Makefile client/Makefile db/Makefile lib/Makefile sched/Makefile tools/Makefile test/Makefile py/Makefile test/version.inc py/version.py Makefile" ac_config_headers="$ac_config_headers config.h" @@ -7241,8 +7241,9 @@ do "sched/Makefile" ) CONFIG_FILES="$CONFIG_FILES sched/Makefile" ;; "tools/Makefile" ) CONFIG_FILES="$CONFIG_FILES tools/Makefile" ;; "test/Makefile" ) CONFIG_FILES="$CONFIG_FILES test/Makefile" ;; + "py/Makefile" ) CONFIG_FILES="$CONFIG_FILES py/Makefile" ;; "test/version.inc" ) CONFIG_FILES="$CONFIG_FILES test/version.inc" ;; - "test/version.py" ) CONFIG_FILES="$CONFIG_FILES test/version.py" ;; + "py/version.py" ) CONFIG_FILES="$CONFIG_FILES py/version.py" ;; "Makefile" ) CONFIG_FILES="$CONFIG_FILES Makefile" ;; "depfiles" ) CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "config.h" ) CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; @@ -7364,7 +7365,7 @@ s,@SET_MAKE@,$SET_MAKE,;t t s,@am__leading_dot@,$am__leading_dot,;t t s,@MAJOR_VERSION@,$MAJOR_VERSION,;t t s,@MINOR_VERSION@,$MINOR_VERSION,;t t -s,@SOURCE_TOP_DIR@,$SOURCE_TOP_DIR,;t t +s,@BUILD_TOP_DIR@,$BUILD_TOP_DIR,;t t s,@MAINTAINER_MODE_TRUE@,$MAINTAINER_MODE_TRUE,;t t s,@MAINTAINER_MODE_FALSE@,$MAINTAINER_MODE_FALSE,;t t s,@MAINT@,$MAINT,;t t diff --git a/configure.ac b/configure.ac index 2a3c99ebe5..6880d4a011 100644 --- a/configure.ac +++ b/configure.ac @@ -49,7 +49,7 @@ AC_DEFINE_UNQUOTED(MAJOR_VERSION, $MAJOR_VERSION, [Major part of version number] AC_DEFINE_UNQUOTED(MINOR_VERSION, $MINOR_VERSION, [Minor part of version number]) AC_DEFINE_UNQUOTED(HOSTTYPE, "$host", [Host for this compilation]) -AC_SUBST(SOURCE_TOP_DIR, `pwd`) +AC_SUBST(BUILD_TOP_DIR, `pwd`) AM_MAINTAINER_MODE @@ -129,7 +129,7 @@ AC_SUBST(CLIENT_BIN_FILENAME,[boinc_${MAJOR_VERSION}.${MINOR_VERSION}_$host${EXE # by default, create static binaries on linux. [if [ "$target_os" = "linux-gnu" ]; then - STATIC_FLAGS="-static -static-libgcc" + STATIC_FLAGS="-static" fi echo "checking static flags... 
${STATIC_FLAGS:-(none)}"] AC_SUBST(STATIC_FLAGS) @@ -144,8 +144,9 @@ AC_CONFIG_FILES([RSAEuro/source/Makefile sched/Makefile tools/Makefile test/Makefile + py/Makefile test/version.inc - test/version.py + py/version.py Makefile ]) diff --git a/db/Makefile.in b/db/Makefile.in index 92c2ae078b..17108bf6d4 100644 --- a/db/Makefile.in +++ b/db/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ diff --git a/db/constraints.sql b/db/constraints.sql index 8575b9b36a..e898da45f8 100644 --- a/db/constraints.sql +++ b/db/constraints.sql @@ -1,4 +1,3 @@ -use BOINC_DB_NAME; alter table platform add unique(name); diff --git a/db/drop.sql b/db/drop.sql deleted file mode 100644 index cb28e92cc2..0000000000 --- a/db/drop.sql +++ /dev/null @@ -1,2 +0,0 @@ -drop database if exists BOINC_DB_NAME; -create database BOINC_DB_NAME; diff --git a/db/schema.sql b/db/schema.sql index 359542477b..1aa524d030 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,4 +1,3 @@ -use BOINC_DB_NAME create table project ( id integer not null auto_increment, diff --git a/lib/Makefile.in b/lib/Makefile.in index 36973a187c..7395227ea6 100644 --- a/lib/Makefile.in +++ b/lib/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before diff --git a/py/Makefile.am b/py/Makefile.am new file mode 100644 index 0000000000..7596d66359 --- /dev/null +++ b/py/Makefile.am @@ -0,0 +1,10 @@ +## $Id$ + +include $(top_srcdir)/Makefile.incl + +EXTRA_DIST = boinc.py boinc_db.py boinc_db.py.in version.py.in db_def_to_py + +$(srcdir)/boinc_db.py: ../db/boinc_db.h ../lib/result_state.h + cat $^ | $(srcdir)/db_def_to_py > $@ + +# all: $(srcdir)/boinc_db.py diff --git a/py/Makefile.in b/py/Makefile.in new file mode 100644 index 0000000000..8c6c9d9e0b --- /dev/null +++ b/py/Makefile.in @@ -0,0 +1,324 @@ +# Makefile.in generated by automake 1.7.5 from Makefile.am. +# @configure_input@ + +# Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 +# Free Software Foundation, Inc. +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +srcdir = @srcdir@ +top_srcdir = @top_srcdir@ +VPATH = @srcdir@ +pkgdatadir = $(datadir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +top_builddir = .. 
+ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +INSTALL = @INSTALL@ +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +target_triplet = @target@ +ACLOCAL = @ACLOCAL@ +AMDEP_FALSE = @AMDEP_FALSE@ +AMDEP_TRUE = @AMDEP_TRUE@ +AMTAR = @AMTAR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CLIENT_BIN_FILENAME = @CLIENT_BIN_FILENAME@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LTLIBOBJS = @LTLIBOBJS@ +MAINT = @MAINT@ +MAINTAINER_MODE_FALSE = @MAINTAINER_MODE_FALSE@ +MAINTAINER_MODE_TRUE = @MAINTAINER_MODE_TRUE@ +MAJOR_VERSION = @MAJOR_VERSION@ +MAKEINFO = @MAKEINFO@ +MINOR_VERSION = @MINOR_VERSION@ +OBJEXT = @OBJEXT@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +RANLIB = @RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STATIC_FLAGS = @STATIC_FLAGS@ +STRIP = @STRIP@ +VERSION = @VERSION@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_RANLIB = @ac_ct_RANLIB@ +ac_ct_STRIP = @ac_ct_STRIP@ +am__fastdepCC_FALSE = @am__fastdepCC_FALSE@ +am__fastdepCC_TRUE = @am__fastdepCC_TRUE@ +am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@ +am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +datadir = @datadir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +oldincludedir = @oldincludedir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +target = @target@ +target_alias = @target_alias@ +target_cpu = @target_cpu@ +target_os = @target_os@ +target_vendor = @target_vendor@ + +MYSQL_INCLUDES = /usr/local/mysql/include +MYSQL_INCLUDES2 = /usr/local/mysql/include/mysql +MYSQL_INCLUDES3 = /usr/include/mysql + +MYSQL_LIBS = \ + -L/usr/local/mysql/lib -L/sw/lib/mysql -L/usr/local/lib/mysql \ + -lmysqlclient -L/usr/local/lib -lz \ + -lm $(NETLIBS) + + +RSA_LIBS = \ + -L$(top_builddir)/RSAEuro/source \ + -lrsaeuro + + +AM_CFLAGS = -g -Wall +AM_CXXFLAGS = -g -Wall + +AM_CPPFLAGS = \ + -I$(top_srcdir)/lib \ + -I$(top_srcdir)/api \ + -I$(top_srcdir)/db \ + -I$(top_srcdir)/RSAEuro/source \ + 
-I$(top_srcdir)/client \ + -I$(top_srcdir)/tools \ + -I$(MYSQL_INCLUDES) \ + -I$(MYSQL_INCLUDES2) \ + -I$(MYSQL_INCLUDES3) \ + -include $(top_builddir)/config.h + + +# this is useful as a dependency to make sure librsaeuro gets compiled before +# programs linking to it: +LIBRSA = $(top_builddir)/RSAEuro/source/librsaeuro.a + +EXTRA_DIST = boinc.py boinc_db.py boinc_db.py.in version.py.in db_def_to_py +subdir = py +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = version.py +DIST_SOURCES = +DIST_COMMON = $(top_srcdir)/Makefile.incl Makefile.am Makefile.in \ + version.py.in +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ Makefile.am $(top_srcdir)/Makefile.incl $(top_srcdir)/configure.ac $(ACLOCAL_M4) + cd $(top_srcdir) && \ + $(AUTOMAKE) --gnu py/Makefile +Makefile: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.in $(top_builddir)/config.status + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe) +version.py: $(top_builddir)/config.status version.py.in + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ +uninstall-info-am: +tags: TAGS +TAGS: + +ctags: CTAGS +CTAGS: + +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) + +top_distdir = .. +distdir = $(top_distdir)/$(PACKAGE)-$(VERSION) + +distdir: $(DISTFILES) + $(mkinstalldirs) $(distdir)/.. + @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ + list='$(DISTFILES)'; for file in $$list; do \ + case $$file in \ + $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ + $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ + esac; \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test "$$dir" != "$$file" && test "$$dir" != "."; then \ + dir="/$$dir"; \ + $(mkinstalldirs) "$(distdir)$$dir"; \ + else \ + dir=''; \ + fi; \ + if test -d $$d/$$file; then \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + fi; \ + cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + else \ + test -f $(distdir)/$$file \ + || cp -p $$d/$$file $(distdir)/$$file \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile + +installdirs: +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + INSTALL_STRIP_FLAG=-s \ + `test -z '$(STRIP)' || \ + echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -rm -f Makefile $(CONFIG_CLEAN_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+clean: clean-am + +clean-am: clean-generic mostlyclean-am + +distclean: distclean-am + +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +info: info-am + +info-am: + +install-data-am: + +install-exec-am: + +install-info: install-info-am + +install-man: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-info-am + +.PHONY: all all-am check check-am clean clean-generic distclean \ + distclean-generic distdir dvi dvi-am info info-am install \ + install-am install-data install-data-am install-exec \ + install-exec-am install-info install-info-am install-man \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic pdf pdf-am ps ps-am uninstall uninstall-am \ + uninstall-info-am + + +$(LIBRSA): + cd $(top_builddir)/RSAEuro/source; ${MAKE} librsaeuro.a + +$(srcdir)/boinc_db.py: ../db/boinc_db.h ../lib/result_state.h + cat $^ | $(srcdir)/db_def_to_py > $@ + +# all: $(srcdir)/boinc_db.py +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/py/boinc.py b/py/boinc.py new file mode 100644 index 0000000000..758c42a55a --- /dev/null +++ b/py/boinc.py @@ -0,0 +1,585 @@ +## $Id$ + +# the module MySQLdb can be installed on debian with "apt-get install python2.2-mysqldb" + +# TODO: make things work if build_dir != src_dir + +from version import * +from boinc_db import * +import os, sys, glob, time, shutil, re, random +import MySQLdb + +class Options: + pass + +options = Options() +errors = Options() +errors.count = 0 + +options.have_init = False +options.install_method = None +options.echo_verbose = 1 +options.is_test = False +options.client_bin_filename = CLIENT_BIN_FILENAME + +def init(): + if options.have_init: return + options.have_init = True + options.tty = os.isatty(1) + options.echo_overwrite = options.tty and options.echo_verbose==1 + + # VERBOSE: 0 = print nothing + # 1 = print some (default + # if output is a tty, overwrite lines. 
+ # 2 = print all + + if options.install_method == 'copy': + options.install_function = shutil.copy + elif options.install_method == 'link' or options.install_method == 'hardlink': + options.install_function = my_link + elif options.install_method == 'symlink' or options.install_method == 'softlink': + options.install_function = my_symlink + else: + fatal_error("Invalid install method: %s"%options.install_method) + +prev_overwrite = False +def verbose_echo(level, line): + global prev_overwrite + if level == 0: + if prev_overwrite: + print + print line + prev_overwrite = False + elif options.echo_verbose >= level: + if options.echo_overwrite: + print "\r ", + print "\r", line, + sys.stdout.flush() + prev_overwrite = True + else: + print line + +def fatal_error(msg): + errors.count += 1 + verbose_echo(0, "FATAL ERROR: "+msg) + sys.exit(1) + +def error(msg, fatal=0): + if fatal: fatal_error(msg) + errors.count += 1 + verbose_echo(0, "ERROR: "+msg) + +def verbose_sleep(msg, wait): + front = msg + ' [sleep ' + back = ']' + for i in range(1,wait+1): + verbose_echo(1, msg + ' [sleep ' + ('.'*i).ljust(wait) + ']') + time.sleep(1) + +def get_env_var(name, default = None): + value = os.environ.get(name, default) + if value == None: + print "Environment variable %s not defined" % name + sys.exit(1) + return value + +def shell_call(cmd, doexec=False, failok=False): + if doexec: + os.execl('/bin/sh', 'sh', '-c', cmd) + error("Command failed: "+cmd, fatal=(not failok)) + os._exit(1) + if os.system(cmd): + error("Command failed: "+cmd, fatal=(not failok)) + return 1 + return 0 + +def verbose_shell_call(cmd, doexec=False, failok=False): + verbose_echo(2, " "+cmd) + return shell_call(cmd, doexec, failok) + +def destpath(src,dest): + if dest.endswith('/'): + return dest + os.path.basename(src) + else: + return dest + +# my_symlink and my_link just add the filename to the exception object if one +# is raised - don't know why it's not already there +def my_symlink(src,dest): + dest = destpath(src,dest) + try: + os.symlink(src,dest) + except OSError, e: + e.filename = dest + raise + +def my_link(src,dest): + dest = destpath(src,dest) + try: + os.link(src,dest) + except OSError, e: + e.filename = dest + raise + +# install = options.install_function +def install(src, dest): + options.install_function(src, dest) + +def install_glob(glob_source, dest, failok=False): + dest = os.path.join(dest, '') # append '/' if necessary + for src in glob.glob(glob_source): + if not os.path.isdir(src): + install(src, dest) + +def macro_substitute(macro, replacement, infile, outfile): + open(outfile, 'w').write(open(infile).read().replace(macro, replacement)) +def macro_substitute_inplace(macro, replacement, inoutfile): + old = inoutfile + '.old' + os.rename(inoutfile, old) + macro_substitute(macro, replacement, old, inoutfile) + +def check_program_exists(prog): + if not os.path.isfile(prog): + fatal_error(""" +Executable not found: %s +Did you `make' yet? 
+""" % prog) +def check_core_client_executable(): + check_program_exists(builddir('client', CLIENT_BIN_FILENAME)) +def check_app_executable(app): + check_program_exists(builddir('apps', app)) + +def make_executable(name): + os.chmod(name, 755) +def force_symlink(src, dest): + if os.path.exists(dest): + os.unlink(dest) + my_symlink(src, dest) +def rmtree(dir): + if os.path.exists(dir): + shutil.rmtree(dir) + +def _remove_trail(s, suffix): + if s.endswith(suffix): + return s[:-len(suffix)] + else: + return s + +def _url_to_filename(url): + return _remove_trail(url.replace('http://','').replace('/','_'),'_') +def account_file_name(url): + return 'account_' + _url_to_filename(url) + '.xml' + +def srcdir(*dirs): + return apply(os.path.join,(TOP_SRC_DIR,)+dirs) + +def builddir(*dirs): + return apply(os.path.join,(TOP_BUILD_DIR,)+dirs) + +def run_tool(cmd): + verbose_shell_call(builddir('tools', cmd)) + +def _gen_key_p(private_key, public_key): + shell_call("%s/crypt_prog -genkey 1024 %s %s >/dev/null" % ( + builddir('lib'), + private_key, + public_key)) +def _gen_key(key): + _gen_key_p(key+'_private', key+'_public') + +def get_int(s): + '''Convert a string to an int; return 0 on error.''' + try: return int(sys.argv[1]) + except: return 0 + +def unique(list): + d = {} + for i in list: + d[i] = 1 + return d.keys() + +def map_xml(dic, keys): + if not isinstance(dic,dict): + dic = dic.__dict__ + s = '' + for key in keys: + s += "<%s>%s\n" % (key, dic[key], key) + return s[:-1] + +def generate_shmem_key(): + return '0x1111%x' % random.randrange(0,2**16) + +def _check_vars(dict, **names): + for key in names: + value = names[key] + if not key in dict: + if value == None: + raise SystemExit('error in test script: required parameter "%s" not specified'%key) + dict[key] = value + for key in dict: + if not key in names: + raise SystemExit('error in test script: extraneous parameter "%s" unknown'%key) + +def db_query(db, query): + db.query(query) + result = db.use_result() + return result and result.fetch_row(0,1) + +def num_results(db, q=""): + return db_query(db, "select count(*) from result "+q)[0]['count(*)'] +def num_wus_left(db): + return num_results(db, "where server_state=%d"%RESULT_SERVER_STATE_UNSENT) +def num_results_done(db): + return num_results(db, "where server_state=%d"%RESULT_SERVER_STATE_OVER) + +def query_yesno(str): + '''Query user; default Yes''' + verbose_echo(0,'') + print str, "[Y/n] ", + return not raw_input().strip().lower().startswith('n') + +def query_noyes(str): + '''Query user; default No''' + verbose_echo(0,'') + print str, "[y/N] ", + return raw_input().strip().lower().startswith('y') + +class Platform: + def __init__(self, name, user_friendly_name=None): + self.name = name + self.user_friendly_name = user_friendly_name or name + +class CoreVersion: + def __init__(self): + self.version = 1 + self.platform = Platform(PLATFORM) + self.exec_dir = builddir('client') + self.exec_name = options.client_bin_filename + +class App: + def __init__(self, name): + assert(name) + self.name = name + +class AppVersion: + def __init__(self, app, version = 1): + self.exec_names = [] + self.exec_dir = builddir('apps') + self.exec_names = [app.name] + self.app = app + self.version = 1 + self.platform = Platform(PLATFORM) + +class Project: + def __init__(self, + short_name, long_name, + core_versions=None, key_dir=None, + apps=None, app_versions=None, appname=None, + resource_share=None, redundancy=None): + init() + self.config_options = [] + self.config_daemons = [] + self.short_name = 
short_name or 'test_'+appname + self.long_name = long_name or 'Project ' + self.short_name.replace('_',' ').capitalize() + self.db_passwd = '' + self.shmem_key = generate_shmem_key() + self.resource_share = resource_share or 1 + self.redundancy = redundancy or 2 + self.output_level = 3 + + self.master_url = os.path.join(options.html_url , self.short_name , '') + self.download_url = os.path.join(options.html_url , self.short_name , 'download') + self.cgi_url = options.cgi_url + self.upload_url = os.path.join(self.cgi_url , self.short_name , 'file_upload_handler') + self.scheduler_url = os.path.join(self.cgi_url , self.short_name , 'cgi') + self.project_dir = os.path.join(options.projects_dir , self.short_name) + self.download_dir = os.path.join(self.project_dir , 'download') + self.upload_dir = os.path.join(self.project_dir , 'upload') + self.key_dir = key_dir or os.path.join(self.project_dir , 'keys') + self.user_name = options.user_name + self.db_name = self.user_name + '_' + self.short_name + self.project_php_file = srcdir('html_user/project.inc.sample') + self.project_specific_prefs_php_file = srcdir('html_user/project_specific_prefs.inc.sample') + + self.core_versions = core_versions or [CoreVersion()] + self.app_versions = app_versions or [AppVersion(App(appname))] + self.apps = apps or unique(map(lambda av: av.app, self.app_versions)) + self.platforms = [Platform(PLATFORM)] + # convenience vars: + self.app_version = self.app_versions[0] + self.app = self.apps[0] + + def dir(self, *dirs): + return apply(os.path.join,(self.project_dir,)+dirs) + + def keydir(self, *dirs): + return apply(os.path.join,(self.key_dir,)+dirs) + + def run_db_script(self, script): + shell_call('mysql %s < %s' % (self.db_name,srcdir('db', script))) + + def drop_db_if_exists(self): + shell_call('echo "drop database if exists %s" | mysql' % self.db_name) + + def create_db(self): + shell_call('echo "create database %s" | mysql' % self.db_name) + + def db_open(self): + return MySQLdb.connect(db=self.db_name) + + def create_keys(self): + _gen_key(self.keydir('upload')) + _gen_key(self.keydir('code_sign')) + + def query_create_keys(self): + return not query_yesno("Keys don't exist in %s; generate them?"%self.key_dir) + + def keys_exist(self): + keys = ['upload_private', 'upload_public', + 'code_sign_private', 'code_sign_public' ] + for key in keys: + if not os.path.exists(self.keydir(key)): return False + return True + + def install_project(self, scheduler_file = None): + if os.path.exists(self.dir()): + raise SystemExit('Project directory "%s" already exists; this would clobber it!'%self.dir()) + + verbose_echo(1, "Setting up server: creating directories"); + # make the CGI writeable in case scheduler writes req/reply files + # TODO: that is a security risk; don't do this in the future - write + # req/reply files somewhere else + map(lambda dir: os.mkdir(self.dir(dir)), + [ '', 'cgi-bin', 'bin', 'upload', 'download', 'keys', 'log', + 'html_ops', 'html_user', 'html_user/project_specific']) + map(lambda dir: os.chmod(self.dir(dir), 0777), + [ 'cgi-bin', 'upload', 'log' ]) + + if not self.keys_exist(): + if self.query_create_keys(): + verbose_echo(1, "Setting up server files: generating keys"); + self.create_keys() + + # copy the user and administrative PHP files to the project dir, + verbose_echo(1, "Setting up server files: copying html directories") + + install_glob(srcdir('html_user/*.php'), self.dir('html_user/')) + install_glob(srcdir('html_user/*.inc'), self.dir('html_user/')) + 
install_glob(srcdir('html_user/*.txt'), self.dir('html_user/')) + install_glob(srcdir('html_ops/*.php'), self.dir('html_ops/')) + install_glob(srcdir('html_ops/*.inc'), self.dir('html_ops/')) + install(builddir('tools/country_select'), self.dir('html_user/')) + install(self.project_php_file, + self.dir('html_user', 'project_specific', 'project.inc')) + install(self.project_specific_prefs_php_file, + self.dir('html_user', 'project_specific', 'project_specific_prefs.inc')) + + my_symlink(self.download_dir, self.dir('html_user', 'download')) + + # Copy the sched server in the cgi directory with the cgi names given + # source_dir/html_usr/schedulers.txt + # + + verbose_echo(1, "Setting up server files: copying cgi programs"); + if scheduler_file: + r = re.compile('([^<]+)', re.IGNORECASE) + f = open(self.dir('html_user', scheduler_file)) + for line in f: + # not sure if this is what the scheduler file is supposed to + # mean + match = r.search(line) + if match: + cgi_name = match.group(1) + verbose_echo(2, "Setting up server files: copying " + cgi_name); + install(builddir('sched/cgi'), self.dir('cgi-bin', cgi_name,'')) + f.close() + else: + scheduler_file = 'schedulers.txt' + f = open(self.dir('html_user', scheduler_file), 'w') + print >>f, "" + self.scheduler_url, "" + f.close() + + # copy all the backend programs + map(lambda (s): install(builddir('sched',s), self.dir('cgi-bin',s)), + [ 'cgi', 'file_upload_handler']) + map(lambda (s): install(builddir('sched',s), self.dir('bin',s)), + [ 'make_work', + 'feeder', 'timeout_check', 'validate_test', + 'file_deleter', 'assimilator' ]) + map(lambda (s): install(srcdir('sched',s), self.dir('bin',s)), + [ 'start', 'stop', 'status', + 'boinc_config.py', 'grep_logs' ]) + + verbose_echo(1, "Setting up database") + self.create_db() + map(self.run_db_script, [ 'schema.sql', 'constraints.sql' ]) + + db = self.db_open() + db.query("insert into project(short_name, long_name) values('%s', '%s')" %( + self.short_name, self.long_name)); + + verbose_echo(1, "Setting up database: adding %d apps(s)" % len(self.apps)) + for app in self.apps: + check_app_executable(app.name) + db.query("insert into app(name, create_time) values ('%s', %d)" %( + app.name, time.time())) + + self.platforms = unique(map(lambda a: a.platform, self.app_versions)) + verbose_echo(1, "Setting up database: adding %d platform(s)" % len(self.platforms)) + + db.close() + + for platform in self.platforms: + run_tool("add platform -db_name %s -platform_name %s -user_friendly_name '%s'" %( + self.db_name, platform.name, platform.user_friendly_name)) + + verbose_echo(1, "Setting up database: adding %d core version(s)" % len(self.core_versions)) + for core_version in self.core_versions: + run_tool(("add core_version -db_name %s -platform_name %s" + + " -version %s -download_dir %s -download_url %s -exec_dir %s" + + " -exec_files %s") % + (self.db_name, core_version.platform.name, + core_version.version, + self.download_dir, + self.download_url, + core_version.exec_dir, + core_version.exec_name)) + + verbose_echo(1, "Setting up database: adding %d app version(s)" % len(self.app_versions)) + for app_version in self.app_versions: + app = app_version.app + cmd = ("add app_version -db_name %s -app_name '%s'" + + " -platform_name %s -version %s -download_dir %s -download_url %s" + + " -code_sign_keyfile %s -exec_dir %s -exec_files") % ( + self.db_name, app.name, app_version.platform.name, + app_version.version, + self.download_dir, + self.download_url, + os.path.join(self.key_dir, 'code_sign_private'), + 
app_version.exec_dir) + for exec_name in app_version.exec_names: + cmd += ' ' + exec_name + run_tool(cmd) + + verbose_echo(1, "Setting up server files: writing config files"); + + config = map_xml(self, + [ 'db_name', 'db_passwd', 'shmem_key', + 'key_dir', 'download_url', 'download_dir', + 'upload_url', 'upload_dir', 'project_dir', 'user_name', + 'cgi_url', + 'output_level' ]) + self.config_options = config.split('\n') + self.write_config() + + # edit "index.php" in the user HTML directory to have the right file + # as the source for scheduler_urls; default is schedulers.txt + + macro_substitute_inplace('FILE_NAME', scheduler_file, + self.dir('html_user', 'index.php')) + + # create symbolic links to the CGI and HTML directories + verbose_echo(1, "Setting up server files: linking cgi programs") + force_symlink(self.dir('cgi-bin'), os.path.join(options.cgi_dir, self.short_name)) + force_symlink(self.dir('html_user'), os.path.join(options.html_dir, self.short_name)) + force_symlink(self.dir('html_ops'), os.path.join(options.html_dir, self.short_name+'_admin')) + + # show the URLs for user and admin sites + admin_url = os.path.join("html_user", self.short_name+'_admin/') + + verbose_echo(2, "Master URL: " + self.master_url) + verbose_echo(2, "Admin URL: " + admin_url) + + def http_password(self, user, password): + 'Adds http password protection to the html_ops directory' + passwd_file = self.dir('html_ops', '.htpassword') + f = open(self.dir('html_ops', '.htaccess'), 'w') + print >>f, "AuthName '%s Administration'" % self.long_name + print >>f, "AuthType Basic" + print >>f, "AuthUserFile %s" % passwd_file + print >>f, "require valid-user" + f.close() + shell_call("htpassword -bc %s %s %s" % (passwd_file, user, password)) + + def _run_sched_prog(self, prog, args='', logfile=None): + verbose_shell_call("cd %s && ./%s %s >> %s.log 2>&1" % + (self.dir('bin'), prog, args, (logfile or prog))) + + def start_servers(self): + self.started = True + self._run_sched_prog('start', '-v --enable') + verbose_sleep("Starting servers for project '%s'" % self.short_name, 1) + + def _build_sched_commandlines(self, progname, kwargs): + '''Given a KWARGS dictionary build a list of command lines string depending on the program.''' + each_app = False + if progname == 'feeder': + _check_vars(kwargs) + elif progname == 'timeout_check': + _check_vars(kwargs, app=self.app, nerror=5, ndet=5, nredundancy=5) + elif progname == 'make_work': + work = kwargs.get('work', self.work) + _check_vars(kwargs, cushion=None, redundancy=self.redundancy, + result_template=os.path.realpath(work.result_template), + wu_name=work.wu_template) + elif progname == 'validate_test': + _check_vars(kwargs, quorum=self.redundancy) + each_app = True + elif progname == 'file_deleter': + _check_vars(kwargs) + elif progname == 'assimilator': + _check_vars(kwargs) + each_app = True + else: + raise SystemExit("test script error: invalid progname '%s'"%progname) + cmdline = ' '.join(map(lambda k: '-%s %s'%(k,kwargs[k]), kwargs.keys())) + if each_app: + return map(lambda av: '-app %s %s'%(av.app.name,cmdline), self.app_versions) + else: + return [cmdline] + + def sched_run(self, prog, **kwargs): + for cmdline in self._build_sched_commandlines(prog, kwargs): + self._run_sched_prog(prog, '-d 3 -one_pass '+cmdline) + def sched_install(self, prog, **kwargs): + for cmdline in self._build_sched_commandlines(prog, kwargs): + self.config_daemons.append("%s -d 3 %s" %(prog, cmdline)) + self.write_config() + def sched_uninstall(self, prog): + self.config_daemons = 
filter(lambda l: l.find(prog)==-1, self.config_daemons) + self.write_config() + + def start_stripcharts(self): + map(lambda l: self.copy(os.path.join('stripchart', l), 'cgi-bin/'), + [ 'stripchart.cgi', 'stripchart', 'stripchart.cnf', + 'looper', 'db_looper', 'datafiles', 'get_load', 'dir_size' ]) + macro_substitute('BOINC_DB_NAME', self.db_name, srcdir('stripchart/samples/db_count'), + self.dir('bin/db_count')) + make_executable(self.dir('bin/db_count')) + + self._run_sched_prog('looper' , 'get_load 1' , 'get_load') + self._run_sched_prog('db_looper' , '"result" 1' , 'count_results') + self._run_sched_prog('db_looper' , '"workunit where assimilate_state=2" 1' , 'assimilated_wus') + self._run_sched_prog('looper' , '"dir_size ../download" 1' , 'download_size') + self._run_sched_prog('looper' , '"dir_size ../upload" 1' , 'upload_size') + + def stop(self): + verbose_echo(1,"Stopping server(s) for project '%s'"%self.short_name) + self._run_sched_prog('start', '-v --disable') + self.started = False + + def maybe_stop(self): + if self.started: self.stop() + + def write_config(self): + f = open(self.dir('config.xml'), 'w') + print >>f, '' + print >>f, ' ' + for line in self.config_options: + print >>f, " ", line + print >>f, ' ' + print >>f, ' ' + for daemon in self.config_daemons: + print >>f, " %s"%daemon + print >>f, ' ' + print >>f, '' diff --git a/py/db_def_to_py b/py/db_def_to_py new file mode 100755 index 0000000000..5f90478589 --- /dev/null +++ b/py/db_def_to_py @@ -0,0 +1,16 @@ +#!/usr/bin/env perl + +## $Id$ + +## parse the db #defines to php. + +## syntax: ./db_def_to_php < ../db/boinc_db.h > boinc_db.inc + +print "# Generated by db_def_to_py on "; +system('date'); + +while (<>) { + if (/^\s*#define\s+([^\s]+)\s+([0-9]+)\s*$/) { + print qq/$1 = $2\n/; + } +} diff --git a/py/version.py.in b/py/version.py.in new file mode 100644 index 0000000000..33f3c856bf --- /dev/null +++ b/py/version.py.in @@ -0,0 +1,18 @@ +## $Id$ + +import os +import sys + +# define version numbers using autoconf +MAJOR_VERSION = @MAJOR_VERSION@ +MINOR_VERSION = @MINOR_VERSION@ +CLIENT_BIN_FILENAME = "@CLIENT_BIN_FILENAME@" +PLATFORM = "@host@" +_BUILD_TOP_DIR = "@BUILD_TOP_DIR@" +_REL_TOP_SRC_DIR = "@top_srcdir@" +_REL_SRC_DIR = "@srcdir@" +TOP_BUILD_DIR = os.path.realpath(_BUILD_TOP_DIR) +TOP_SRC_DIR = os.path.realpath(os.path.join(_BUILD_TOP_DIR, 'py', _REL_TOP_SRC_DIR)) + +if _REL_SRC_DIR != ".": + sys.path.insert(0, _REL_SRC_DIR) diff --git a/sched/Makefile.am b/sched/Makefile.am index 97a15dc10b..88e5d9b4f3 100644 --- a/sched/Makefile.am +++ b/sched/Makefile.am @@ -97,8 +97,5 @@ fcgi_DEPENDENCIES = $(LIBRSA) $(LIB_SCHED) fcgi_CPPFLAGS = -include /usr/local/include/fcgi_stdio.h -D_USING_FCGI_ $(AM_CPPFLAGS) fcgi_LDADD = $(LDADD) $(RSA_LIBS) -lfcgi -lfcgi++ $(MYSQL_LIBS) -stop: - ln -s start stop - -status: - ln -s start status +$(srcdir)/stop $(srcdir)/status: $(srcdir)/start + ln -f -s start $@ diff --git a/sched/Makefile.in b/sched/Makefile.in index a3f2c350ea..62c080729c 100644 --- a/sched/Makefile.in +++ b/sched/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ 
-I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before @@ -1043,11 +1043,8 @@ uninstall-am: uninstall-binSCRIPTS uninstall-info-am $(LIBRSA): cd $(top_builddir)/RSAEuro/source; ${MAKE} librsaeuro.a -stop: - ln -s start stop - -status: - ln -s start status +$(srcdir)/stop $(srcdir)/status: $(srcdir)/start + ln -f -s start $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff --git a/test/.cvsignore b/test/.cvsignore index b27e97c6cd..b88bf16bff 100644 --- a/test/.cvsignore +++ b/test/.cvsignore @@ -1,3 +1,2 @@ version.inc -version.py testproxy.log diff --git a/test/Makefile.am b/test/Makefile.am index 4fd02d9608..95bf21e0ed 100644 --- a/test/Makefile.am +++ b/test/Makefile.am @@ -17,16 +17,13 @@ TESTS = test_sanity.py \ EXTRA_DIST = \ *.xml *wu *result *output *input \ - db_def_to_php db_def_to_py \ - *.php version.inc.in boinc.py test_*.py version.py.in \ - boinc_db.inc boinc_db.py + test_*.py \ + db_def_to_php *.php version.inc.in boinc_db.inc \ + test*.inc init.inc boinc_db.inc -BUILT_SOURCES = boinc_db.inc boinc_db.py +# BUILT_SOURCES = boinc_db.inc boinc_db.py -$(TESTS): version.py boinc_db.py +$(TESTS): ../py/version.py $(top_srcdir)/py/boinc_db.py boinc_db.inc: ../db/boinc_db.h ../lib/result_state.h cat $^ | ./db_def_to_php > $@ - -boinc_db.py: ../db/boinc_db.h ../lib/result_state.h - cat $^ | ./db_def_to_py > $@ diff --git a/test/Makefile.in b/test/Makefile.in index 67bb8cd14d..66a678265e 100644 --- a/test/Makefile.in +++ b/test/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ -163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before @@ -186,22 +186,19 @@ TESTS = test_sanity.py \ # TODO: phase out php stuff EXTRA_DIST = \ *.xml *wu *result *output *input \ - db_def_to_php db_def_to_py \ - *.php version.inc.in boinc.py test_*.py version.py.in \ - boinc_db.inc boinc_db.py + test_*.py \ + db_def_to_php *.php version.inc.in boinc_db.inc \ + test*.inc init.inc boinc_db.inc - -BUILT_SOURCES = boinc_db.inc boinc_db.py subdir = test ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs CONFIG_HEADER = $(top_builddir)/config.h -CONFIG_CLEAN_FILES = version.inc version.py +CONFIG_CLEAN_FILES = version.inc DIST_SOURCES = DIST_COMMON = README $(top_srcdir)/Makefile.incl Makefile.am \ - Makefile.in version.inc.in version.py.in -all: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) all-am + Makefile.in version.inc.in +all: all-am .SUFFIXES: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ Makefile.am $(top_srcdir)/Makefile.incl $(top_srcdir)/configure.ac $(ACLOCAL_M4) @@ -211,8 +208,6 @@ Makefile: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.in $(top_builddir)/config.s cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe) version.inc: $(top_builddir)/config.status version.inc.in cd 
$(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ -version.py: $(top_builddir)/config.status version.py.in - cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ uninstall-info-am: tags: TAGS TAGS: @@ -328,13 +323,11 @@ distdir: $(DISTFILES) done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) check-TESTS -check: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) check-am +check: check-am all-am: Makefile installdirs: -install: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) install-am +install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am @@ -358,7 +351,6 @@ distclean-generic: maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." - -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) clean: clean-am clean-am: clean-generic mostlyclean-am @@ -416,13 +408,12 @@ uninstall-am: uninstall-info-am $(LIBRSA): cd $(top_builddir)/RSAEuro/source; ${MAKE} librsaeuro.a -$(TESTS): version.py boinc_db.py +# BUILT_SOURCES = boinc_db.inc boinc_db.py + +$(TESTS): ../py/version.py $(top_srcdir)/py/boinc_db.py boinc_db.inc: ../db/boinc_db.h ../lib/result_state.h cat $^ | ./db_def_to_php > $@ - -boinc_db.py: ../db/boinc_db.h ../lib/result_state.h - cat $^ | ./db_def_to_py > $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff --git a/test/boinc.py b/test/boinc.py deleted file mode 100644 index 2b111a91d9..0000000000 --- a/test/boinc.py +++ /dev/null @@ -1,1117 +0,0 @@ -## $Id$ - -# boinc.py -# -# A set of classes for testing BOINC. These classes let you create multiple -# projects and multiple hosts (all running on a single machine), add -# applications and work units, run the system, and verify that the results are -# correct. -# -# See doc/test.html for details - -# the module MySQLdb can be installed on debian with "apt-get install python2.2-mysqldb" - -# TODO: make things work if build_dir != src_dir - -from version import * -from boinc_db import * -import os, sys, glob, time, shutil, re, atexit, traceback, random, signal -import MySQLdb - -errors = 0 - -HAVE_INIT = 0 -def init(): - global HAVE_INIT - global VERBOSE, AUTO_SETUP, USER_NAME, INSTALL_METHOD, DELETE, TTY, OVERWRITE - global PORT, PROXY_PORT, install_function - global AUTO_SETUP_BASEDIR, MINISERV_BASEDIR, MINISERV_PORT, MINISERV_BASEURL - global KEY_DIR, PROJECTS_DIR, CGI_DIR, HTML_DIR, HOSTS_DIR, CGI_URL, HTML_URL - - if HAVE_INIT: return - HAVE_INIT = 1 - - # TODO: use @build_dir@ from autoconf. - os.chdir(os.path.dirname(sys.argv[0])) - if not os.path.exists("test_uc.py"): - raise SystemExit('Could not find boinc_db.py') - - # VERBOSE: 0 = print nothing - # 1 = print some (default - # if output is a tty, overwrite lines. 
- # 2 = print all - - VERBOSE = int(get_env_var('BOINC_TEST_VERBOSE', 1)) - AUTO_SETUP = int(get_env_var("BOINC_TEST_AUTO_SETUP",0)) #### - USER_NAME = get_env_var("BOINC_TEST_USER_NAME", '') or get_env_var("USER") - INSTALL_METHOD = get_env_var("BOINC_TEST_INSTALL_METHOD", 'hardlink').lower() - DELETE = get_env_var("BOINC_TEST_DELETE", 'if-successful').lower() - - TTY = os.isatty(1) - OVERWRITE = TTY and VERBOSE==1 - - PROXY_PORT = 16000 + (os.getpid() % 1000) - - if INSTALL_METHOD == 'copy': - install_function = shutil.copy - elif INSTALL_METHOD == 'link' or INSTALL_METHOD == 'hardlink': - install_function = my_link - elif INSTALL_METHOD == 'symlink' or INSTALL_METHOD == 'softlink': - install_function = my_symlink - else: - fatal_error("Invalid BOINC_TEST_INSTALL_METHOD: %s"%BOINC_TEST_INSTALL_METHOD) - - if AUTO_SETUP: - AUTO_SETUP_BASEDIR = 'run-%d'%os.getpid() - verbose_echo(0, "Creating testbed in %s"%AUTO_SETUP_BASEDIR) - os.mkdir(AUTO_SETUP_BASEDIR) - try: - os.unlink('run') - except OSError: - pass - try: - os.symlink(AUTO_SETUP_BASEDIR, 'run') - except OSError: - pass - MINISERV_BASEDIR = os.path.join(os.getcwd(), AUTO_SETUP_BASEDIR) - MINISERV_PORT = 15000 + (os.getpid() % 1000) - MINISERV_BASEURL = 'http://localhost:%d/' % MINISERV_PORT - miniserver = MiniServer(MINISERV_PORT, MINISERV_BASEDIR) - miniserver.run() - #KEY_DIR = os.path.join(MINISERV_BASEDIR, 'keys') - PROJECTS_DIR = os.path.join(MINISERV_BASEDIR, 'projects') - CGI_DIR = os.path.join(MINISERV_BASEDIR, 'cgi-bin') - HTML_DIR = os.path.join(MINISERV_BASEDIR, 'html') - HOSTS_DIR = os.path.join(MINISERV_BASEDIR, 'hosts') - CGI_URL = os.path.join(MINISERV_BASEURL, 'cgi-bin') - HTML_URL = os.path.join(MINISERV_BASEURL, 'html') - PORT = MINISERV_PORT - map(os.mkdir, [PROJECTS_DIR, CGI_DIR, HTML_DIR, HOSTS_DIR]) - else: - KEY_DIR = get_env_var("BOINC_TEST_KEY_DIR") - PROJECTS_DIR = get_env_var("BOINC_TEST_PROJECTS_DIR") - CGI_DIR = get_env_var("BOINC_TEST_CGI_DIR") - HTML_DIR = get_env_var("BOINC_TEST_HTML_DIR") - HOSTS_DIR = get_env_var("BOINC_TEST_HOSTS_DIR") - CGI_URL = get_env_var("BOINC_TEST_CGI_URL") - HTML_URL = get_env_var("BOINC_TEST_HTML_URL") - m = re.compile('http://[^/]+:(\d+)/').match(HTML_URL) - PORT = m and m.group(1) or 80 - -prev_overwrite = False -def verbose_echo(level, line): - global prev_overwrite - if level == 0: - if prev_overwrite: - print - print line - prev_overwrite = False - elif VERBOSE >= level: - if OVERWRITE: - print "\r ", - print "\r", line, - sys.stdout.flush() - prev_overwrite = True - else: - print line - -def fatal_error(msg): - global errors - errors += 1 - verbose_echo(0, "FATAL ERROR: "+msg) - sys.exit(1) - -def error(msg, fatal=0): - global errors - if fatal: fatal_error(msg) - errors += 1 - verbose_echo(0, "ERROR: "+msg) - -def verbose_sleep(msg, wait): - front = msg + ' [sleep ' - back = ']' - for i in range(1,wait+1): - verbose_echo(1, msg + ' [sleep ' + ('.'*i).ljust(wait) + ']') - time.sleep(1) - -def get_env_var(name, default = None): - value = os.environ.get(name, default) - if value == None: - print "Environment variable %s not defined" % name - sys.exit(1) - return value - -def shell_call(cmd, doexec=False, failok=False): - if doexec: - os.execl('/bin/sh', 'sh', '-c', cmd) - error("Command failed: "+cmd) - os._exit(1) - if os.system(cmd): - error("Command failed: "+cmd, fatal=(not failok)) - return 1 - return 0 - -def verbose_shell_call(cmd, doexec=False, failok=False): - verbose_echo(2, " "+cmd) - return shell_call(cmd, doexec, failok) - -def proxerize(url, t=True): - 
if t: - r = re.compile('http://[^/]*/') - return r.sub('http://localhost:%d/'%PROXY_PORT, url) - else: - return url - -def destpath(src,dest): - if dest.endswith('/'): - return dest + os.path.basename(src) - else: - return dest - -# my_symlink and my_link just add the filename to the exception object if one -# is raised - don't know why it's not already there -def my_symlink(src,dest): - dest = destpath(src,dest) - try: - os.symlink(src,dest) - except OSError, e: - e.filename = dest - raise - -def my_link(src,dest): - dest = destpath(src,dest) - try: - os.link(src,dest) - except OSError, e: - e.filename = dest - raise - -def use_cgi_proxy(): - global CGI_URL - init() - CGI_URL = proxerize(CGI_URL) -def use_html_proxy(): - global HTML_URL - init() - HTML_URL = proxerize(HTML_URL) - -def check_exists(file): - if not os.path.isfile(file): - error("file doesn't exist: " + file) - return 1 - return 0 -def check_deleted(file): - if os.path.isfile(file): - error("file wasn't deleted: " + file) - return 1 - return 0 -def check_files_match(file, correct, descr=''): - if not os.path.isfile(file): - error("file doesn't exist: %s (needs to match %s)" % (file,correct)) - return 1 - if os.system("diff %s %s" % (file, correct)): - error("File mismatch%s: %s %s" % (descr, file, correct)) - return 1 - else: - verbose_echo(2, "Files match%s: %s %s" % (descr, file, correct)) - return 0 - -def check_program_exists(prog): - if not os.path.isfile(prog): - fatal_error(""" -Executable not found: %s -Did you `make' yet? -""" % prog) -def check_core_client_executable(): - check_program_exists(os.path.join(SRC_DIR, 'client', CLIENT_BIN_FILENAME)) -def check_app_executable(app): - check_program_exists(os.path.join(SRC_DIR, 'apps', app)) - -def macro_substitute(macro, replacement, infile, outfile): - open(outfile, 'w').write(open(infile).read().replace(macro, replacement)) -def macro_substitute_inplace(macro, replacement, inoutfile): - old = inoutfile + '.old' - os.rename(inoutfile, old) - macro_substitute(macro, replacement, old, inoutfile) - -def make_executable(name): - os.chmod(name, 755) -def force_symlink(src, dest): - if os.path.exists(dest): - os.unlink(dest) - my_symlink(src, dest) -def rmtree(dir): - if os.path.exists(dir): - shutil.rmtree(dir) - -def _remove_trail(s, suffix): - if s.endswith(suffix): - return s[:-len(suffix)] - else: - return s - -def _url_to_filename(url): - return _remove_trail(url.replace('http://','').replace('/','_'),'_') -def account_file_name(url): - return 'account_' + _url_to_filename(url) + '.xml' - -def run_tool(cmd): - verbose_shell_call(os.path.join(SRC_DIR, 'tools', cmd)) - -def _gen_key_p(private_key, public_key): - shell_call("%s/crypt_prog -genkey 1024 %s %s >/dev/null" % ( - os.path.join(SRC_DIR, 'lib'), - private_key, - public_key)) -def _gen_key(key): - _gen_key_p(key+'_private', key+'_public') - -def get_int(s): - '''Convert a string to an int; return 0 on error.''' - try: return int(sys.argv[1]) - except: return 0 - -def unique(list): - d = {} - for i in list: - d[i] = 1 - return d.keys() - -def map_xml(dic, keys): - if not isinstance(dic,dict): - dic = dic.__dict__ - s = '' - for key in keys: - s += "<%s>%s\n" % (key, dic[key], key) - return s[:-1] - -def generate_shmem_key(): - return '0x1111%x' % random.randrange(0,2**16) - -def _check_vars(dict, **names): - for key in names: - value = names[key] - if not key in dict: - if value == None: - raise SystemExit('error in test script: required parameter "%s" not specified'%key) - dict[key] = value - for key in dict: - if 
not key in names: - raise SystemExit('error in test script: extraneous parameter "%s" unknown'%key) - -class STARTS_WITH(str): - pass - -class MATCH_REGEXPS(list): - def match(self, text): - '''Returns True iff each regexp in self is in text''' - for r in self: - R = re.compile(r) - if not R.search(text): - return False - return True - pass - -def dict_match(dic, resultdic): - '''match values in DIC against RESULTDIC''' - if not isinstance(dic, dict): - dic = dic.__dict__ - for key in dic.keys(): - expected = dic[key] - try: - found = resultdic[key] - except KeyError: - error("Database query result didn't have key '%s'!" % key) - continue - if isinstance(expected,STARTS_WITH): - match = found.startswith(expected) - elif isinstance(expected,MATCH_REGEXPS): - match = expected.match(found) - else: - match = found == expected - if not match: - id = resultdic.get('id', '?') - if str(found).count('\n') or str(expected).count('\n'): - format = """result %s: unexpected %s: - -%s - -(expected:) - -%s""" - else: - format = "result %s: unexpected %s '%s' (expected '%s')" - error( format % (id, key, found, expected)) - -def _db_query(db, query): - db.query(query) - result = db.use_result() - return result and result.fetch_row(0,1) - -def num_results(db, q=""): - return _db_query(db, "select count(*) from result "+q)[0]['count(*)'] -def num_wus_left(db): - return num_results(db, "where server_state=%d"%RESULT_SERVER_STATE_UNSENT) -def num_results_done(db): - return num_results(db, "where server_state=%d"%RESULT_SERVER_STATE_OVER) - - -class Platform: - def __init__(self, name, user_friendly_name=None): - self.name = name - self.user_friendly_name = user_friendly_name or name - -class CoreVersion: - def __init__(self): - self.version = 1 - self.platform = Platform(PLATFORM) - self.exec_dir = os.path.join(SRC_DIR, 'client') - self.exec_name = CLIENT_BIN_FILENAME - -class App: - def __init__(self, name): - assert(name) - self.name = name - -class AppVersion: - def __init__(self, app, version = 1): - self.exec_names = [] - self.exec_dir = os.path.join(SRC_DIR, 'apps') - self.exec_names = [app.name] - self.app = app - self.version = 1 - self.platform = Platform(PLATFORM) - -class ProjectList(list): - def run(self): map(lambda i: i.run(), self) - def check(self): map(lambda i: i.check(), self) - def stop(self): map(lambda i: i.maybe_stop(), self) - def get_progress(self): - s = "Running core client - results [done/total]:" - for db in self.dbs: - s += " [%d/%d]" % (num_results_done(db), num_results(db)) - return s - def start_progress_meter(self): - self.dbs = map(lambda i: i.db_open(), self) - self.rm = ResultMeter(self.get_progress) - def stop_progress_meter(self): - self.rm.stop() - -all_projects = ProjectList() - -class Project: - def __init__(self, works, users=None, hosts=None, - short_name=None, long_name=None, core_versions=None, - apps=None, app_versions=None, appname=None, - resource_share=None, redundancy=None, - add_to_list=True): - init() - if add_to_list: - all_projects.append(self) - self.config_options = [] - self.config_daemons = [] - self.short_name = short_name or 'test_'+appname - self.long_name = long_name or 'Project ' + self.short_name.replace('_',' ').capitalize() - self.db_passwd = '' - self.generate_keys = AUTO_SETUP - self.shmem_key = generate_shmem_key() - self.resource_share = resource_share or 1 - self.redundancy = redundancy or 2 - self.output_level = 3 - - self.master_url = os.path.join(HTML_URL , self.short_name , '') - self.download_url = os.path.join(HTML_URL , 
self.short_name , 'download') - self.cgi_url = CGI_URL - self.upload_url = os.path.join(self.cgi_url , self.short_name , 'file_upload_handler') - self.scheduler_url = os.path.join(self.cgi_url , self.short_name , 'cgi') - self.project_dir = os.path.join(PROJECTS_DIR , self.short_name) - self.download_dir = os.path.join(self.project_dir , 'download') - self.upload_dir = os.path.join(self.project_dir , 'upload') - self.key_dir = os.path.join(self.project_dir , 'keys') - self.user_name = USER_NAME - self.db_name = self.user_name + '_' + self.short_name - self.project_php_file = 'html_user/project.inc.sample' - self.project_specific_prefs_php_file = 'html_user/project_specific_prefs.inc.sample' - - self.core_versions = core_versions or [CoreVersion()] - self.app_versions = app_versions or [AppVersion(App(appname))] - self.apps = apps or unique(map(lambda av: av.app, self.app_versions)) - self.platforms = [Platform(PLATFORM)] - self.works = works - self.users = users or [User()] - self.hosts = hosts or [Host()] - # convenience vars: - self.app_version = self.app_versions[0] - self.app = self.apps[0] - self.host = self.hosts[0] - self.work = self.works[0] - self.user = self.users[0] - - self.started = False - - def srcdir(self, *dirs): - return apply(os.path.join,(SRC_DIR,)+dirs) - def dir(self, *dirs): - return apply(os.path.join,(self.project_dir,)+dirs) - - def mkdir(self, dir): - os.mkdir(self.dir(dir)) - def chmod(self, dir): - os.chmod(self.dir(dir), 0777) - def copy(self, source, dest): - '''SOURCE is relative to SOURCE_DIR; DEST is relative to project dir''' - install_function(self.srcdir(source), self.dir(dest)) - def copyglob(self, source, dest = None, failok=False): - dest = self.dir(dest or os.path.join(source, '')) - for src in glob.glob(os.path.join(self.srcdir(source), '*')): - if not os.path.isdir(src): - install_function(src, dest) - - def run_db_script(self, script): - shell_call('sed -e s/BOINC_DB_NAME/%s/ %s | mysql' - % (self.db_name, self.srcdir('db', script))) - def db_open(self): - return MySQLdb.connect(db=self.db_name) - - def create_keys(self): - _gen_key(self.dir('keys/upload')) - _gen_key(self.dir('keys/code_sign')) - - def install_project(self, scheduler_file = None): - if not AUTO_SETUP: - verbose_echo(1, "Deleting previous test runs") - rmtree(self.dir()) - - verbose_echo(1, "Setting up server: creating directories"); - # make the CGI writeable in case scheduler writes req/reply files - - map(self.mkdir, - [ '', 'cgi-bin', 'bin', 'upload', 'download', 'keys', 'html_ops', 'html_user', 'log']) - map(self.chmod, - [ '', 'cgi-bin', 'upload', 'log' ]) - - if self.generate_keys: - verbose_echo(1, "Setting up server files: generating keys"); - self.create_keys() - else: - verbose_echo(1, "Setting up server files: copying keys"); - self.copyglob(KEY_DIR, os.path.join(self.key_dir,'')) - - # copy the user and administrative PHP files to the project dir, - verbose_echo(1, "Setting up server files: copying html directories") - - self.copyglob('html_user') - self.copyglob('html_ops') - self.copy('tools/country_select', 'html_user/') - self.mkdir('html_user/project_specific') - self.copy(self.project_php_file, - os.path.join('html_user', 'project_specific', 'project.inc')) - self.copy(self.project_specific_prefs_php_file, - os.path.join('html_user', 'project_specific', 'project_specific_prefs.inc')) - - my_symlink(self.download_dir, self.dir('html_user', 'download')) - - # Copy the sched server in the cgi directory with the cgi names given - # 
source_dir/html_usr/schedulers.txt - # - - verbose_echo(1, "Setting up server files: copying cgi programs"); - r = re.compile('([^<]+)', re.IGNORECASE) - if scheduler_file: - f = open(self.dir('html_user', scheduler_file)) - for line in f: - # not sure if this is what the scheduler file is supposed to - # mean - match = r.search(line) - if match: - cgi_name = match.group(1) - verbose_echo(2, "Setting up server files: copying " + cgi_name); - install_function('sched/cgi', os.path.join('cgi-bin', cgi_name,'')) - f.close() - else: - scheduler_file = 'schedulers.txt' - f = open(self.dir('html_user', scheduler_file), 'w') - print >>f, "" + self.scheduler_url, "" - f.close() - - - # copy all the backend programs - map(lambda (s): self.copy(os.path.join('sched', s), 'cgi-bin/'), - [ 'cgi', 'file_upload_handler', ]) - map(lambda (s): self.copy(os.path.join('sched', s), 'bin/'), - [ 'make_work', - 'feeder', 'timeout_check', 'validate_test', - 'file_deleter', 'assimilator', 'start', 'stop', - 'boinc_config.py', - 'grep_logs' ]) - - verbose_echo(1, "Setting up database") - map(self.run_db_script, [ 'drop.sql', 'schema.sql', 'constraints.sql' ]) - - db = self.db_open() - db.query("insert into project(short_name, long_name) values('%s', '%s')" %( - self.short_name, self.long_name)); - - verbose_echo(1, "Setting up database: adding %d user(s)" % len(self.users)) - for user in self.users: - if user.project_prefs: - pp = "\n%s\n\n" % user.project_prefs - else: - pp = '' - if user.global_prefs: - gp = "\n%s\n\n" % user.global_prefs - else: - gp = '' - - db.query(("insert into user values (0, %d, '%s', '%s', '%s', " + - "'Peru', '12345', 0, 0, 0, '%s', '%s', 0, 'home', '', 0, 1)") % ( - time.time(), - user.email_addr, - user.name, - user.authenticator, - gp, - pp)) - - verbose_echo(1, "Setting up database: adding %d apps(s)" % len(self.apps)) - for app in self.apps: - check_app_executable(app.name) - db.query("insert into app(name, create_time) values ('%s', %d)" %( - app.name, time.time())) - - self.platforms = unique(map(lambda a: a.platform, self.app_versions)) - verbose_echo(1, "Setting up database: adding %d platform(s)" % len(self.platforms)) - - db.close() - - for platform in self.platforms: - run_tool("add platform -db_name %s -platform_name %s -user_friendly_name '%s'" %( - self.db_name, platform.name, platform.user_friendly_name)) - - verbose_echo(1, "Setting up database: adding %d core version(s)" % len(self.core_versions)) - for core_version in self.core_versions: - run_tool(("add core_version -db_name %s -platform_name %s" + - " -version %s -download_dir %s -download_url %s -exec_dir %s" + - " -exec_files %s") % - (self.db_name, core_version.platform.name, - core_version.version, - self.download_dir, - self.download_url, - core_version.exec_dir, - core_version.exec_name)) - - verbose_echo(1, "Setting up database: adding %d app version(s)" % len(self.app_versions)) - for app_version in self.app_versions: - app = app_version.app - cmd = ("add app_version -db_name %s -app_name '%s'" + - " -platform_name %s -version %s -download_dir %s -download_url %s" + - " -code_sign_keyfile %s -exec_dir %s -exec_files") % ( - self.db_name, app.name, app_version.platform.name, - app_version.version, - self.download_dir, - self.download_url, - os.path.join(self.key_dir, 'code_sign_private'), - app_version.exec_dir) - for exec_name in app_version.exec_names: - cmd += ' ' + exec_name - run_tool(cmd) - - verbose_echo(1, "Setting up server files: writing config files"); - - config = map_xml(self, - [ 'db_name', 
'db_passwd', 'shmem_key', - 'key_dir', 'download_url', 'download_dir', - 'upload_url', 'upload_dir', 'project_dir', 'user_name', - 'cgi_url', - 'output_level' ]) - self.config_options = config.split('\n') - self.write_config() - - # edit "index.php" in the user HTML directory to have the right file - # as the source for scheduler_urls; default is schedulers.txt - - macro_substitute_inplace('FILE_NAME', scheduler_file, - self.dir('html_user', 'index.php')) - - # create symbolic links to the CGI and HTML directories - verbose_echo(1, "Setting up server files: linking cgi programs") - force_symlink(self.dir('cgi-bin'), os.path.join(CGI_DIR, self.short_name)) - force_symlink(self.dir('html_user'), os.path.join(HTML_DIR, self.short_name)) - force_symlink(self.dir('html_ops'), os.path.join(HTML_DIR, self.short_name+'_admin')) - - # show the URLs for user and admin sites - admin_url = os.path.join("html_user", self.short_name+'_admin/') - - verbose_echo(2, "Master URL: " + self.master_url) - verbose_echo(2, "Admin URL: " + admin_url) - - def install_works(self): - for work in self.works: - work.install(self) - - def install_hosts(self): - for host in self.hosts: - for user in self.users: - host.add_user(user, self) - host.install() - - def install(self): - self.install_project() - self.install_works() - self.install_hosts() - - def http_password(self, user, password): - 'Adds http password protection to the html_ops directory' - passwd_file = self.dir('html_ops', '.htpassword') - f = open(self.dir('html_ops', '.htaccess'), 'w') - print >>f, "AuthName '%s Administration'" % self.long_name - print >>f, "AuthType Basic" - print >>f, "AuthUserFile %s" % passwd_file - print >>f, "require valid-user" - f.close() - shell_call("htpassword -bc %s %s %s" % (passwd_file, user, password)) - - def _disable(self, *path): - '''Temporarily disable a file to test exponential backoff''' - path = apply(self.dir, path) - os.rename(path, path+'.disabled') - def _reenable(self, *path): - path = apply(self.dir, path) - os.rename(path+'.disabled', path) - - def disable_masterindex(self): - self._disable('html_user/index.php') - def reenable_masterindex(self): - self._reenable('html_user/index.php') - def disable_scheduler(self, num = ''): - self._disable('cgi-bin/cgi'+str(num)) - def reenable_scheduler(self, num = ''): - self._reenable('cgi-bin/cgi'+str(num)) - def disable_downloaddir(self, num = ''): - self._disable('download'+str(num)) - def reenable_downloaddir(self, num = ''): - self._reenable('download'+str(num)) - def disable_file_upload_handler(self, num = ''): - self._disable('cgi-bin/file_upload_handler'+str(num)) - def reenable_file_upload_handler(self, num = ''): - self._reenable('cgi-bin/file_upload_handler'+str(num)) - - def _run_sched_prog(self, prog, args='', logfile=None): - verbose_shell_call("cd %s && ./%s %s >> %s.log 2>&1" % - (self.dir('bin'), prog, args, (logfile or prog))) - def start_servers(self): - self.started = True - self._run_sched_prog('start', '-v --enable') - verbose_sleep("Starting servers for project '%s'" % self.short_name, 1) - self.read_server_pids() - - def read_server_pids(self): - pid_dir = self.dir('pid') - self.pids = {} - for pidfile in glob.glob(os.path.join(pid_dir, '*.pid')): - try: - pid = int(open(pidfile).readline()) - except: - pid = 0 - if pid: - progname = os.path.split(pidfile)[1].split('.')[0] - self.pids[progname] = pid - - def wait_server(self, progname, msg=None): - msg = msg or "Waiting for %s to finish..."%progname - verbose_echo(1, msg) - 
os.waitpid(self.pids[progname], 0) - verbose_echo(1, msg+" done.") - - def _build_sched_commandlines(self, progname, kwargs): - '''Given a KWARGS dictionary build a list of command lines string depending on the program.''' - each_app = False - if progname == 'feeder': - _check_vars(kwargs) - elif progname == 'timeout_check': - _check_vars(kwargs, app=self.app, nerror=5, ndet=5, nredundancy=5) - elif progname == 'make_work': - work = kwargs.get('work', self.work) - _check_vars(kwargs, cushion=None, redundancy=self.redundancy, - result_template=os.path.realpath(work.result_template), - wu_name=work.wu_template) - elif progname == 'validate_test': - _check_vars(kwargs, quorum=self.redundancy) - each_app = True - elif progname == 'file_deleter': - _check_vars(kwargs) - elif progname == 'assimilator': - _check_vars(kwargs) - each_app = True - else: - raise SystemExit("test script error: invalid progname '%s'"%progname) - cmdline = ' '.join(map(lambda k: '-%s %s'%(k,kwargs[k]), kwargs.keys())) - if each_app: - return map(lambda av: '-app %s %s'%(av.app.name,cmdline), self.app_versions) - else: - return [cmdline] - - def sched_run(self, prog, **kwargs): - for cmdline in self._build_sched_commandlines(prog, kwargs): - self._run_sched_prog(prog, '-d 3 -one_pass '+cmdline) - def sched_install(self, prog, **kwargs): - for cmdline in self._build_sched_commandlines(prog, kwargs): - self.config_daemons.append("%s -d 3 %s" %(prog, cmdline)) - self.write_config() - def sched_uninstall(self, prog): - self.config_daemons = filter(lambda l: l.find(prog)==-1, self.config_daemons) - self.write_config() - - def start_stripcharts(self): - map(lambda l: self.copy(os.path.join('stripchart', l), 'cgi-bin/'), - [ 'stripchart.cgi', 'stripchart', 'stripchart.cnf', - 'looper', 'db_looper', 'datafiles', 'get_load', 'dir_size' ]) - macro_substitute('BOINC_DB_NAME', self.db_name, self.srcdir('stripchart/samples/db_count'), - self.dir('bin/db_count')) - make_executable(self.dir('bin/db_count')) - - self._run_sched_prog('looper' , 'get_load 1' , 'get_load') - self._run_sched_prog('db_looper' , '"result" 1' , 'count_results') - self._run_sched_prog('db_looper' , '"workunit where assimilate_state=2" 1' , 'assimilated_wus') - self._run_sched_prog('looper' , '"dir_size ../download" 1' , 'download_size') - self._run_sched_prog('looper' , '"dir_size ../upload" 1' , 'upload_size') - - def stop(self): - verbose_echo(1,"Stopping server(s) for project '%s'"%self.short_name) - self._run_sched_prog('start', '-v --disable') - self.started = False - - def maybe_stop(self): - if self.started: self.stop() - - def write_config(self): - f = open(self.dir('config.xml'), 'w') - print >>f, '' - print >>f, ' ' - for line in self.config_options: - print >>f, " ", line - print >>f, ' ' - print >>f, ' ' - for daemon in self.config_daemons: - print >>f, " %s"%daemon - print >>f, ' ' - print >>f, '' - - def check_results(self, matchresult, expected_count=None): - '''MATCHRESULT should be a dictionary of columns to check, such as: - - server_state - stderr_out - exit_status - ''' - expected_count = expected_count or self.redundancy - db = self.db_open() - rows = _db_query(db,"select * from result") - for row in rows: - dict_match(matchresult, row) - db.close() - if len(rows) != expected_count: - error("expected %d results, but found %d" % (expected_count, len(rows))) - - def check_files_match(self, result, correct, count=None): - '''if COUNT is specified then [0,COUNT) is mapped onto the %d in RESULT''' - if count != None: - errs = 0 - for i in 
range(count): - errs += self.check_files_match(result%i, correct) - return errs - return check_files_match(self.dir(result), - correct, " for project '%s'"%self.short_name) - def check_deleted(self, file, count=None): - if count != None: - errs = 0 - for i in range(count): - errs += self.check_deleted(file%i) - return errs - return check_deleted(self.dir(file)) - def check_exists(self, file, count=None): - if count != None: - errs = 0 - for i in range(count): - errs += self.check_exists(file%i) - return errs - return check_exists(self.dir(file)) - -class User: - '''represents an account on a particular project''' - def __init__(self): - self.name = 'John' - self.email_addr = 'john@boinc.org' - self.authenticator = "3f7b90793a0175ad0bda68684e8bd136" - self.project_prefs = None - self.global_prefs = None - -class HostList(list): - def run(self): map(lambda i: i.run(), self) - -all_hosts = HostList() - -class Host: - def __init__(self, add_to_list=True): - if add_to_list: - all_hosts.append(self) - self.name = 'Commodore64' - self.users = [] - self.projects = [] - self.global_prefs = None - self.log_flags = 'log_flags.xml' - self.host_dir = os.path.join(HOSTS_DIR, self.name) - self.defargs = "-exit_when_idle -skip_cpu_benchmarks -debug_fake_exponential_backoff" - # self.defargs = "-exit_when_idle -skip_cpu_benchmarks -sched_retry_delay_min 1" - - def add_user(self, user, project): - self.users.append(user) - self.projects.append(project) - - def dir(self, *dirs): - return apply(os.path.join,(self.host_dir,)+dirs) - - def install(self): - rmtree(self.dir()) - os.mkdir(self.dir()) - - verbose_echo(1, "Setting up host '%s': creating account files" % self.name); - for (user,project) in map(None,self.users,self.projects): - filename = self.dir(account_file_name(project.master_url)) - verbose_echo(2, "Setting up host '%s': writing %s" % (self.name, filename)) - - f = open(filename, "w") - print >>f, "" - print >>f, map_xml(project, ['master_url']) - print >>f, map_xml(user, ['authenticator']) - if user.project_prefs: - print >>f, user.project_prefs - print >>f, "" - f.close() - - # copy log flags and global prefs, if any - if self.log_flags: - shutil.copy(self.log_flags, self.dir('log_flags.xml')) - if self.global_prefs: - shell_call("cp %s %s" % (self.global_prefs, self.dir('global_prefs.xml'))) - # shutil.copy(self.global_prefs, self.dir('global_prefs.xml')) - - def run(self, args='', asynch=False): - if asynch: - verbose_echo(1, "Running core client asynchronously") - pid = os.fork() - if pid: return pid - else: - verbose_echo(1, "Running core client") - verbose_shell_call("cd %s && %s %s %s > client.out 2> client.err" % ( - self.dir(), os.path.join(SRC_DIR, 'client', CLIENT_BIN_FILENAME), - self.defargs, args)) - if asynch: os._exit(0) - - def read_cpu_time_file(filename): - try: - return float(open(self.dir(filename)).readline()) - except: - return 0 - - def check_file_present(self, project, filename): - check_exists(self.dir('projects', - _url_to_filename(project.master_url), - filename)) - -class Work: - def __init__(self, redundancy=1): - self.input_files = [] - self.rsc_iops = 1.8e12 - self.rsc_fpops = 1e13 - self.rsc_memory = 1e7 - self.rsc_disk = 1e7 - self.delay_bound = 1000 - self.redundancy = redundancy - self.app = None - - def install(self, project): - verbose_echo(1, "Installing work <%s> in project '%s'" %( - self.wu_template, project.short_name)) - if not self.app: - self.app = project.app_versions[0].app - for input_file in unique(self.input_files): - 
install_function(input_file, os.path.join(project.download_dir,'')) - - # simulate multiple data servers by making symbolic links to the - # download directory - r = re.compile('([^<]+)<', re.IGNORECASE) - for line in open(self.wu_template): - match = r.search(line) - if match: - newdir = project.download_dir+match.group(1) - verbose_echo(2, "Linking "+newdir) - os.symlink(project.download_dir, newdir) - - # simulate multiple data servers by making copies of the file upload - # handler - r = re.compile('([^<]+)<', re.IGNORECASE) - for line in open(self.wu_template): - match = r.search(line) - if match: - handler = project.srcdir('sched', 'file_upload_handler') - newhandler = handler + match.group(1) - verbose_echo(2, "Linking "+newhandler) - os.symlink(handler, newhandler) - - cmd = "create_work -db_name %s -download_dir %s -upload_url %s -download_url %s -keyfile %s -appname %s -rsc_iops %.0f -rsc_fpops %.0f -rsc_disk %.0f -wu_template %s -result_template %s -redundancy %s -wu_name %s -delay_bound %d" % ( - project.db_name, project.download_dir, project.upload_url, - project.download_url, os.path.join(project.key_dir,'upload_private'), - self.app.name, self.rsc_iops, self.rsc_fpops, self.rsc_disk, - self.wu_template, self.result_template, self.redundancy, self.wu_template, - self.delay_bound) - - for input_file in self.input_files: - cmd += ' ' + input_file - - run_tool(cmd) - -class ResultMeter: - def __init__(self, func, args=[], delay=.1): - '''Forks to print a progress meter''' - self.pid = os.fork() - if self.pid: - atexit.register(self.stop) - return - while True: - verbose_echo(1, apply(func, args)) - time.sleep(delay) - def stop(self): - if self.pid: - os.kill(self.pid, 9) - self.pid = 0 - -def run_check_all(): - '''Run all projects, run all hosts, check all projects, stop all projects.''' - atexit.register(all_projects.stop) - all_projects.run() - all_projects.start_progress_meter() - if os.environ.get('TEST_STOP_BEFORE_HOST_RUN'): - raise SystemExit, 'Stopped due to $TEST_STOP_BEFORE_HOST_RUN' - all_hosts.run() - all_projects.stop_progress_meter() - all_projects.check() - all_projects.stop() - -def delete_test(): - '''Delete all test data''' - if AUTO_SETUP: - verbose_echo(1, "Deleting testbed %s."%AUTO_SETUP_BASEDIR) - shutil.rmtree(AUTO_SETUP_BASEDIR) - -class Proxy: - def __init__(self, code, cgi=0, html=0, start=1): - self.pid = 0 - self.code = code - if cgi: use_cgi_proxy() - if html: use_html_proxy() - if start: self.start() - def start(self): - self.pid = os.fork() - if not self.pid: - verbose_shell_call( - "exec ./testproxy %d localhost:%d '%s' 2>testproxy.log" % ( - PROXY_PORT, PORT, self.code), - doexec=True) - verbose_sleep("Starting proxy server", 1) - # check if child process died - (pid,status) = os.waitpid(self.pid, os.WNOHANG) - if pid: - fatal_error("testproxy failed") - self.pid = 0 - else: - atexit.register(self.stop) - def stop(self): - if self.pid: - verbose_echo(1, "Stopping proxy server") - try: - os.kill(self.pid, 2) - except OSError: - verbose_echo(0, "Couldn't kill pid %d" % self.pid) - self.pid = 0 - - -class MiniServer: - def __init__(self, port, doc_root, miniserv_root=None): - self.port = port - self.doc_root = doc_root - self.miniserv_root = miniserv_root or os.path.join(doc_root,'miniserv') - if not os.path.isdir(self.miniserv_root): - os.mkdir(self.miniserv_root) - self.config_file = os.path.join(self.miniserv_root, 'miniserv.conf') - self.log_file = os.path.join(self.miniserv_root, 'miniserv.log') - self.pid_file = 
os.path.join(self.miniserv_root, 'miniserv.pid') - print >>open(self.config_file,'w'), ''' -root=%(doc_root)s -mimetypes=/etc/mime.types -port=%(port)d -addtype_cgi=internal/cgi -addtype_php=internal/cgi -index_docs=index.html index.htm index.cgi index.php -logfile=%(log_file)s -pidfile=%(pid_file)s -logtime=168 -ssl=0 -#logout=/etc/webmin/logout-flag -#libwrap=1 -#alwaysresolve=1 -#allow=127.0.0.1 -blockhost_time=300 -no_pam=0 -logouttime=5 -passdelay=1 -blockhost_failures=3 -log=1 -logclear= -loghost=1 -''' %self.__dict__ - - def run(self): - verbose_echo(0,"Running miniserv on localhost:%d"%self.port) - if os.spawnl(os.P_WAIT, os.path.join(SRC_DIR, 'test/miniserv.pl'), 'miniserv', self.config_file): - raise SystemExit("Couldn't spawn miniserv") - atexit.register(self.stop) - - def stop(self): - verbose_echo(1,"Killing miniserv") - try: - pid = int(open(self.pid_file).readline()) - os.kill(pid, signal.SIGINT) - except Exception, e: - print >>sys.stderr, "Couldn't stop miniserv:", e - -def test_msg(msg): - print - print "-- Testing", msg, '-'*(66-len(msg)) - init() - -def test_done(): - global errors - init() - if sys.__dict__.get('last_traceback'): - if sys.last_type == KeyboardInterrupt: - errors += 0.1 - sys.stderr.write("\nTest canceled by user\n") - else: - errors += 1 - sys.stderr.write("\nException thrown - bug in test scripts?\n") - if errors: - verbose_echo(0, "ERRORS: %d" % errors) - if DELETE == 'always': - delete_test() - sys.exit(int(errors)) - else: - verbose_echo(1, "Passed test!") - if OVERWRITE: - print - if DELETE == 'if-successful' or DELETE == 'always': - delete_test() - if OVERWRITE: - print - sys.exit(0) - -atexit.register(test_done) diff --git a/test/test_1sec.py b/test/test_1sec.py index d09f77aa7b..bbec4a630e 100755 --- a/test/test_1sec.py +++ b/test/test_1sec.py @@ -6,7 +6,6 @@ # time is divided correctly between projects The client should do work for # project 2 5 times faster than for project 1 -from boinc import * from test_uc import * if __name__ == '__main__': diff --git a/test/test_backend.py b/test/test_backend.py index a67df8ee3b..207e064915 100755 --- a/test/test_backend.py +++ b/test/test_backend.py @@ -7,7 +7,6 @@ # on a large batch of workunits. 
Confirms that credit is correctly granted # and that unneeded files are deleted -from boinc import * from test_uc import * import time, os diff --git a/test/test_concat.py b/test/test_concat.py index f4d5fc2474..d9b1415976 100755 --- a/test/test_concat.py +++ b/test/test_concat.py @@ -4,7 +4,7 @@ # tests whether command-line arg passing works -from boinc import * +from testbase import * class WorkConcat(Work): def __init__(self, redundancy=2): @@ -13,14 +13,14 @@ class WorkConcat(Work): self.result_template = "concat_result" self.input_files = ['input']*2 -class ProjectConcat(Project): +class ProjectConcat(TestProject): def __init__(self, works=None, users=None, hosts=None, redundancy=2): - Project.__init__(self, - appname = 'concat', - works = works or [WorkConcat()], - users = users, - hosts = hosts, - redundancy=redundancy) + TestProject.__init__(self, + appname = 'concat', + works = works or [WorkConcat()], + users = users, + hosts = hosts, + redundancy=redundancy) def check(self): self.sched_run('validate_test') diff --git a/test/test_sanity.py b/test/test_sanity.py index ffba32ea1f..89eef0fb62 100755 --- a/test/test_sanity.py +++ b/test/test_sanity.py @@ -2,9 +2,8 @@ ## $Id$ +from testbase import * import urllib, random -import boinc -from boinc import * # test makes sure that testing framework is sane @@ -30,19 +29,19 @@ if __name__ == '__main__': check_app_executable("1sec") verbose_echo(1, "Checking directories") - for d in ['PROJECTS_DIR', #'KEY_DIR', - 'CGI_DIR', 'HTML_DIR', 'HOSTS_DIR']: - dir = boinc.__dict__[d] + for d in ['projects_dir', + 'cgi_dir', 'html_dir', 'hosts_dir']: + dir = options.__dict__[d] if not os.path.isdir(dir): error("%s doesn't exist: %s" % (d, dir)) magic = "Foo %x Bar" % random.randint(0,2**16) - html_path = os.path.join(boinc.HTML_DIR, 'test_sanity.txt') - html_url = os.path.join(boinc.HTML_URL, 'test_sanity.txt') + html_path = os.path.join(options.html_dir, 'test_sanity.txt') + html_url = os.path.join(options.html_url, 'test_sanity.txt') html_proxy_url = proxerize(html_url) - cgi_path = os.path.join(boinc.CGI_DIR, 'test_sanity_cgi') - cgi_url = os.path.join(boinc.CGI_URL, 'test_sanity_cgi') + cgi_path = os.path.join(options.cgi_dir, 'test_sanity_cgi') + cgi_url = os.path.join(options.cgi_url, 'test_sanity_cgi') verbose_echo(1, "Checking webserver setup: non-cgi") print >>open(html_path,'w'), magic diff --git a/test/test_uc.py b/test/test_uc.py index b4e0635e74..1b624b9ff6 100755 --- a/test/test_uc.py +++ b/test/test_uc.py @@ -6,7 +6,7 @@ # output is reported correctly Also tests if water levels are working # correctly -from boinc import * +from testbase import * class UserUC(User): def __init__(self): @@ -55,18 +55,18 @@ class ResultUCError: self.stderr_out = MATCH_REGEXPS([ """ APP: upper_case: starting, argc \\d+"""]) -class ProjectUC(Project): +class ProjectUC(TestProject): def __init__(self, works=None, users=None, hosts=None, short_name=None, long_name=None, redundancy=2, resource_share=1): - Project.__init__(self, - appname = 'upper_case', - works = works or [WorkUC(redundancy=redundancy)], - users = users or [UserUC()], - hosts = hosts, - short_name=short_name, long_name=long_name, - redundancy=redundancy, resource_share=resource_share - ) + TestProject.__init__(self, + appname = 'upper_case', + works = works or [WorkUC(redundancy=redundancy)], + users = users or [UserUC()], + hosts = hosts, + short_name=short_name, long_name=long_name, + redundancy=redundancy, resource_share=resource_share + ) def check(self, result=ResultUC()): '''Check 
results uploaded correctly''' diff --git a/test/testbase.py b/test/testbase.py new file mode 100644 index 0000000000..2c0c448d5c --- /dev/null +++ b/test/testbase.py @@ -0,0 +1,610 @@ +## $Id$ + +# testbase.py +# +# A set of classes for testing BOINC. These classes let you create multiple +# projects and multiple hosts (all running on a single machine), add +# applications and work units, run the system, and verify that the results are +# correct. +# +# See doc/test.html for details + +# the module MySQLdb can be installed on debian with "apt-get install python2.2-mysqldb" + +# TODO: make things work if build_dir != src_dir + +import sys +sys.path.append('../py') +try: + from version import * +except ImportError: + raise SystemExit("""testbase.py: Couldn't import version.py + +This file is built from py/version.py.in by configure. + +Perhaps you did not run configure, or you configured in a different directory, +or you are running from the wrong directory.""") + +from boinc import * +import atexit, traceback, signal + +# raise SystemExit('hi') +# raise Exception('Hi') + +options.have_init_t = False + +def test_init(): + if options.have_init_t: return + options.have_init_t = True + + if not os.path.exists('test_uc.py'): + os.chdir(os.path.join(TOP_SRC_DIR,'test')) + if not os.path.exists('test_uc.py'): + raise SystemExit('Could not find boinc_db.py anywhere') + + options.auto_setup = int(get_env_var("BOINC_TEST_AUTO_SETUP",0)) #### + options.user_name = get_env_var("BOINC_TEST_USER_NAME", '') or get_env_var("USER") + options.delete_testbed = get_env_var("BOINC_TEST_DELETE", 'if-successful').lower() + options.install_method = get_env_var("BOINC_TEST_INSTALL_METHOD", 'hardlink').lower() + options.echo_verbose = int(get_env_var("BOINC_TEST_VERBOSE", '1')) + options.proxy_port = 16000 + (os.getpid() % 1000) + + if options.auto_setup: + options.auto_setup_basedir = 'run-%d'%os.getpid() + verbose_echo(0, "Creating testbed in %s"%options.auto_setup_basedir) + os.mkdir(options.auto_setup_basedir) + try: + os.unlink('run') + except OSError: + pass + try: + os.symlink(options.auto_setup_basedir, 'run') + except OSError: + pass + options.miniserv_basedir = os.path.join(os.getcwd(), options.auto_setup_basedir) + options.miniserv_port = 15000 + (os.getpid() % 1000) + options.miniserv_baseurl = 'http://localhost:%d/' % options.miniserv_port + miniserver = MiniServer(options.miniserv_port, options.miniserv_basedir) + miniserver.run() + options.projects_dir = os.path.join(options.miniserv_basedir, 'projects') + options.cgi_dir = os.path.join(options.miniserv_basedir, 'cgi-bin') + options.html_dir = os.path.join(options.miniserv_basedir, 'html') + options.hosts_dir = os.path.join(options.miniserv_basedir, 'hosts') + options.cgi_url = os.path.join(options.miniserv_baseurl, 'cgi-bin') + options.html_url = os.path.join(options.miniserv_baseurl, 'html') + options.port = options.miniserv_port + map(os.mkdir, [options.projects_dir, options.cgi_dir, options.html_dir, options.hosts_dir]) + else: + options.key_dir = get_env_var("BOINC_TEST_KEY_DIR") + options.projects_dir = get_env_var("BOINC_TEST_PROJECTS_DIR") + options.cgi_dir = get_env_var("BOINC_TEST_CGI_DIR") + options.html_dir = get_env_var("BOINC_TEST_HTML_DIR") + options.hosts_dir = get_env_var("BOINC_TEST_HOSTS_DIR") + options.cgi_url = get_env_var("BOINC_TEST_CGI_URL") + options.html_url = get_env_var("BOINC_TEST_HTML_URL") + m = re.compile('http://[^/]+:(\d+)/').match(options.html_url) + options.port = m and m.group(1) or 80 + + init() + +def 
proxerize(url, t=True): + if t: + r = re.compile('http://[^/]*/') + return r.sub('http://localhost:%d/'%options.proxy_port, url) + else: + return url + +def use_cgi_proxy(): + test_init() + options.cgi_url = proxerize(options.cgi_url) +def use_html_proxy(): + test_init() + options.html_url = proxerize(options.html_url) + +def check_exists(file): + if not os.path.isfile(file): + error("file doesn't exist: " + file) + return 1 + return 0 +def check_deleted(file): + if os.path.isfile(file): + error("file wasn't deleted: " + file) + return 1 + return 0 +def check_files_match(file, correct, descr=''): + if not os.path.isfile(file): + error("file doesn't exist: %s (needs to match %s)" % (file,correct)) + return 1 + if os.system("diff %s %s" % (file, correct)): + error("File mismatch%s: %s %s" % (descr, file, correct)) + return 1 + else: + verbose_echo(2, "Files match%s: %s %s" % (descr, file, correct)) + return 0 + +def _check_vars(dict, **names): + for key in names: + value = names[key] + if not key in dict: + if value == None: + raise SystemExit('error in test script: required parameter "%s" not specified'%key) + dict[key] = value + for key in dict: + if not key in names: + raise SystemExit('error in test script: extraneous parameter "%s" unknown'%key) + +class STARTS_WITH(str): + pass + +class MATCH_REGEXPS(list): + def match(self, text): + '''Returns True iff each regexp in self is in text''' + for r in self: + R = re.compile(r) + if not R.search(text): + return False + return True + pass + +def dict_match(dic, resultdic): + '''match values in DIC against RESULTDIC''' + if not isinstance(dic, dict): + dic = dic.__dict__ + for key in dic.keys(): + expected = dic[key] + try: + found = resultdic[key] + except KeyError: + error("Database query result didn't have key '%s'!" 
% key) + continue + if isinstance(expected,STARTS_WITH): + match = found.startswith(expected) + elif isinstance(expected,MATCH_REGEXPS): + match = expected.match(found) + else: + match = found == expected + if not match: + id = resultdic.get('id', '?') + if str(found).count('\n') or str(expected).count('\n'): + format = """result %s: unexpected %s: + +%s + +(expected:) + +%s""" + else: + format = "result %s: unexpected %s '%s' (expected '%s')" + error( format % (id, key, found, expected)) + +class ProjectList(list): + def run(self): map(lambda i: i.run(), self) + def check(self): map(lambda i: i.check(), self) + def stop(self): map(lambda i: i.maybe_stop(), self) + def get_progress(self): + s = "Running core client - results [done/total]:" + for db in self.dbs: + s += " [%d/%d]" % (num_results_done(db), num_results(db)) + return s + def start_progress_meter(self): + self.dbs = map(lambda i: i.db_open(), self) + self.rm = ResultMeter(self.get_progress) + def stop_progress_meter(self): + self.rm.stop() + +all_projects = ProjectList() + +class TestProject(Project): + def __init__(self, works, users=None, hosts=None, + add_to_list=True, **kwargs): + test_init() + if add_to_list: + all_projects.append(self) + + kwargs['short_name'] = kwargs.get('short_name') or 'test_'+kwargs['appname'] + kwargs['long_name'] = kwargs.get('long_name') or 'Project ' + kwargs['short_name'].replace('_',' ').capitalize() + self.works = works + self.users = users or [User()] + self.hosts = hosts or [Host()] + # convenience vars: + self.work = self.works[0] + self.user = self.users[0] + self.host = self.hosts[0] + + apply(Project.__init__, [self], kwargs) + self.started = False + + def init_install(self): + if not options.auto_setup: + verbose_echo(1, "Deleting previous test runs") + rmtree(self.dir()) + self.drop_db_if_exists() + + def query_create_keys(self): + '''Overrides Project::query_create_keys() to always return true''' + return True + + def install_project_users(self): + db = self.db_open() + verbose_echo(1, "Setting up database: adding %d user(s)" % len(self.users)) + for user in self.users: + if user.project_prefs: + pp = "\n%s\n\n" % user.project_prefs + else: + pp = '' + if user.global_prefs: + gp = "\n%s\n\n" % user.global_prefs + else: + gp = '' + + db.query(("insert into user values (0, %d, '%s', '%s', '%s', " + + "'Peru', '12345', 0, 0, 0, '%s', '%s', 0, 'home', '', 0, 1)") % ( + time.time(), + user.email_addr, + user.name, + user.authenticator, + gp, + pp)) + + def install_works(self): + for work in self.works: + work.install(self) + + def install_hosts(self): + for host in self.hosts: + for user in self.users: + host.add_user(user, self) + host.install() + + def install(self): + self.init_install() + self.install_project() + self.install_project_users() + self.install_works() + self.install_hosts() + + def _disable(self, *path): + '''Temporarily disable a file to test exponential backoff''' + path = apply(self.dir, path) + os.rename(path, path+'.disabled') + def _reenable(self, *path): + path = apply(self.dir, path) + os.rename(path+'.disabled', path) + + def disable_masterindex(self): + self._disable('html_user/index.php') + def reenable_masterindex(self): + self._reenable('html_user/index.php') + def disable_scheduler(self, num = ''): + self._disable('cgi-bin/cgi'+str(num)) + def reenable_scheduler(self, num = ''): + self._reenable('cgi-bin/cgi'+str(num)) + def disable_downloaddir(self, num = ''): + self._disable('download'+str(num)) + def reenable_downloaddir(self, num = ''): + 
self._reenable('download'+str(num)) + def disable_file_upload_handler(self, num = ''): + self._disable('cgi-bin/file_upload_handler'+str(num)) + def reenable_file_upload_handler(self, num = ''): + self._reenable('cgi-bin/file_upload_handler'+str(num)) + + def check_results(self, matchresult, expected_count=None): + '''MATCHRESULT should be a dictionary of columns to check, such as: + + server_state + stderr_out + exit_status + ''' + expected_count = expected_count or self.redundancy + db = self.db_open() + rows = db_query(db,"select * from result") + for row in rows: + dict_match(matchresult, row) + db.close() + if len(rows) != expected_count: + error("expected %d results, but found %d" % (expected_count, len(rows))) + + def check_files_match(self, result, correct, count=None): + '''if COUNT is specified then [0,COUNT) is mapped onto the %d in RESULT''' + if count != None: + errs = 0 + for i in range(count): + errs += self.check_files_match(result%i, correct) + return errs + return check_files_match(self.dir(result), + correct, " for project '%s'"%self.short_name) + def check_deleted(self, file, count=None): + if count != None: + errs = 0 + for i in range(count): + errs += self.check_deleted(file%i) + return errs + return check_deleted(self.dir(file)) + def check_exists(self, file, count=None): + if count != None: + errs = 0 + for i in range(count): + errs += self.check_exists(file%i) + return errs + return check_exists(self.dir(file)) + + + +class User: + '''represents an account on a particular project''' + def __init__(self): + self.name = 'John' + self.email_addr = 'john@boinc.org' + self.authenticator = "3f7b90793a0175ad0bda68684e8bd136" + self.project_prefs = None + self.global_prefs = None + +class HostList(list): + def run(self): map(lambda i: i.run(), self) + +all_hosts = HostList() + +class Host: + def __init__(self, add_to_list=True): + if add_to_list: + all_hosts.append(self) + self.name = 'Commodore64' + self.users = [] + self.projects = [] + self.global_prefs = None + self.log_flags = 'log_flags.xml' + self.host_dir = os.path.join(options.hosts_dir, self.name) + self.defargs = "-exit_when_idle -skip_cpu_benchmarks -debug_fake_exponential_backoff" + # self.defargs = "-exit_when_idle -skip_cpu_benchmarks -sched_retry_delay_min 1" + + def add_user(self, user, project): + self.users.append(user) + self.projects.append(project) + + def dir(self, *dirs): + return apply(os.path.join,(self.host_dir,)+dirs) + + def install(self): + rmtree(self.dir()) + os.mkdir(self.dir()) + + verbose_echo(1, "Setting up host '%s': creating account files" % self.name); + for (user,project) in map(None,self.users,self.projects): + filename = self.dir(account_file_name(project.master_url)) + verbose_echo(2, "Setting up host '%s': writing %s" % (self.name, filename)) + + f = open(filename, "w") + print >>f, "" + print >>f, map_xml(project, ['master_url']) + print >>f, map_xml(user, ['authenticator']) + if user.project_prefs: + print >>f, user.project_prefs + print >>f, "" + f.close() + + # copy log flags and global prefs, if any + if self.log_flags: + shutil.copy(self.log_flags, self.dir('log_flags.xml')) + if self.global_prefs: + shell_call("cp %s %s" % (self.global_prefs, self.dir('global_prefs.xml'))) + # shutil.copy(self.global_prefs, self.dir('global_prefs.xml')) + + def run(self, args='', asynch=False): + if asynch: + verbose_echo(1, "Running core client asynchronously") + pid = os.fork() + if pid: return pid + else: + verbose_echo(1, "Running core client") + verbose_shell_call("cd %s && %s %s %s > 
client.out 2> client.err" % ( + self.dir(), builddir('client', options.client_bin_filename), + self.defargs, args)) + if asynch: os._exit(0) + + def read_cpu_time_file(filename): + try: + return float(open(self.dir(filename)).readline()) + except: + return 0 + + def check_file_present(self, project, filename): + check_exists(self.dir('projects', + _url_to_filename(project.master_url), + filename)) + +class Work: + def __init__(self, redundancy=1): + self.input_files = [] + self.rsc_iops = 1.8e12 + self.rsc_fpops = 1e13 + self.rsc_memory = 1e7 + self.rsc_disk = 1e7 + self.delay_bound = 1000 + self.redundancy = redundancy + self.app = None + + def install(self, project): + verbose_echo(1, "Installing work <%s> in project '%s'" %( + self.wu_template, project.short_name)) + if not self.app: + self.app = project.app_versions[0].app + for input_file in unique(self.input_files): + install(input_file, os.path.join(project.download_dir,'')) + + # simulate multiple data servers by making symbolic links to the + # download directory + r = re.compile('([^<]+)<', re.IGNORECASE) + for line in open(self.wu_template): + match = r.search(line) + if match: + newdir = project.download_dir+match.group(1) + verbose_echo(2, "Linking "+newdir) + os.symlink(project.download_dir, newdir) + + # simulate multiple data servers by making copies of the file upload + # handler + r = re.compile('([^<]+)<', re.IGNORECASE) + for line in open(self.wu_template): + match = r.search(line) + if match: + handler = project.srcdir('sched', 'file_upload_handler') + newhandler = handler + match.group(1) + verbose_echo(2, "Linking "+newhandler) + os.symlink(handler, newhandler) + + cmd = "create_work -db_name %s -download_dir %s -upload_url %s -download_url %s -keyfile %s -appname %s -rsc_iops %.0f -rsc_fpops %.0f -rsc_disk %.0f -wu_template %s -result_template %s -redundancy %s -wu_name %s -delay_bound %d" % ( + project.db_name, project.download_dir, project.upload_url, + project.download_url, os.path.join(project.key_dir,'upload_private'), + self.app.name, self.rsc_iops, self.rsc_fpops, self.rsc_disk, + self.wu_template, self.result_template, self.redundancy, self.wu_template, + self.delay_bound) + + for input_file in self.input_files: + cmd += ' ' + input_file + + run_tool(cmd) + +class ResultMeter: + def __init__(self, func, args=[], delay=.1): + '''Forks to print a progress meter''' + self.pid = os.fork() + if self.pid: + atexit.register(self.stop) + return + while True: + verbose_echo(1, apply(func, args)) + time.sleep(delay) + def stop(self): + if self.pid: + os.kill(self.pid, 9) + self.pid = 0 + +def run_check_all(): + '''Run all projects, run all hosts, check all projects, stop all projects.''' + atexit.register(all_projects.stop) + all_projects.run() + all_projects.start_progress_meter() + if os.environ.get('TEST_STOP_BEFORE_HOST_RUN'): + raise SystemExit, 'Stopped due to $TEST_STOP_BEFORE_HOST_RUN' + all_hosts.run() + all_projects.stop_progress_meter() + all_projects.check() + all_projects.stop() + +def delete_test(): + '''Delete all test data''' + if options.auto_setup: + verbose_echo(1, "Deleting testbed %s."%options.auto_setup_basedir) + shutil.rmtree(options.auto_setup_basedir) + +class Proxy: + def __init__(self, code, cgi=0, html=0, start=1): + self.pid = 0 + self.code = code + if cgi: use_cgi_proxy() + if html: use_html_proxy() + if start: self.start() + def start(self): + self.pid = os.fork() + if not self.pid: + verbose_shell_call( + "exec ./testproxy %d localhost:%d '%s' 2>testproxy.log" % ( + options.proxy_port, 
options.port, self.code), + doexec=True) + verbose_sleep("Starting proxy server", 1) + # check if child process died + (pid,status) = os.waitpid(self.pid, os.WNOHANG) + if pid: + fatal_error("testproxy failed") + self.pid = 0 + else: + atexit.register(self.stop) + def stop(self): + if self.pid: + verbose_echo(1, "Stopping proxy server") + try: + os.kill(self.pid, 2) + except OSError: + verbose_echo(0, "Couldn't kill pid %d" % self.pid) + self.pid = 0 + + +class MiniServer: + def __init__(self, port, doc_root, miniserv_root=None): + self.port = port + self.doc_root = doc_root + self.miniserv_root = miniserv_root or os.path.join(doc_root,'miniserv') + if not os.path.isdir(self.miniserv_root): + os.mkdir(self.miniserv_root) + self.config_file = os.path.join(self.miniserv_root, 'miniserv.conf') + self.log_file = os.path.join(self.miniserv_root, 'miniserv.log') + self.pid_file = os.path.join(self.miniserv_root, 'miniserv.pid') + print >>open(self.config_file,'w'), ''' +root=%(doc_root)s +mimetypes=/etc/mime.types +port=%(port)d +addtype_cgi=internal/cgi +addtype_php=internal/cgi +index_docs=index.html index.htm index.cgi index.php +logfile=%(log_file)s +pidfile=%(pid_file)s +logtime=168 +ssl=0 +#logout=/etc/webmin/logout-flag +#libwrap=1 +#alwaysresolve=1 +#allow=127.0.0.1 +blockhost_time=300 +no_pam=0 +logouttime=5 +passdelay=1 +blockhost_failures=3 +log=1 +logclear= +loghost=1 +''' %self.__dict__ + + def run(self): + verbose_echo(0,"Running miniserv on localhost:%d"%self.port) + if os.spawnl(os.P_WAIT, srcdir('test/miniserv.pl'), 'miniserv', self.config_file): + raise SystemExit("Couldn't spawn miniserv") + atexit.register(self.stop) + + def stop(self): + verbose_echo(1,"Killing miniserv") + try: + pid = int(open(self.pid_file).readline()) + os.kill(pid, signal.SIGINT) + except Exception, e: + print >>sys.stderr, "Couldn't stop miniserv:", e + +def test_msg(msg): + print + print "-- Testing", msg, '-'*(66-len(msg)) + test_init() + +def test_done(): + test_init() + if sys.__dict__.get('last_traceback'): + if sys.last_type == KeyboardInterrupt: + errors.count += 0.1 + sys.stderr.write("\nTest canceled by user\n") + else: + errors.count += 1 + sys.stderr.write("\nException thrown - bug in test scripts?\n") + if errors.count: + verbose_echo(0, "ERRORS.COUNT: %d" % errors.count) + if options.delete_testbed == 'always': + delete_test() + sys.exit(int(errors.count)) + else: + verbose_echo(1, "Passed test!") + if options.echo_overwrite: + print + if options.delete_testbed == 'if-successful' or options.delete_testbed == 'always': + delete_test() + if options.echo_overwrite: + print + sys.exit(0) + +atexit.register(test_done) diff --git a/tools/Makefile.am b/tools/Makefile.am index 444bc50ef5..b504d56493 100644 --- a/tools/Makefile.am +++ b/tools/Makefile.am @@ -4,6 +4,10 @@ include $(top_srcdir)/Makefile.incl bin_PROGRAMS = create_work add country_select +EXTRA_DIST = make_project.py + +# TODO: use libboinc for these: + create_work_SOURCES = \ create_work.C \ backend_lib.C \ diff --git a/tools/Makefile.in b/tools/Makefile.in index 24f5a54c47..b308602131 100644 --- a/tools/Makefile.in +++ b/tools/Makefile.in @@ -46,6 +46,7 @@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ +BUILD_TOP_DIR = @BUILD_TOP_DIR@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ @@ -88,7 +89,6 @@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ -SOURCE_TOP_DIR = @SOURCE_TOP_DIR@ STATIC_FLAGS = @STATIC_FLAGS@ STRIP = @STRIP@ VERSION = @VERSION@ @@ 
-163,7 +163,7 @@ AM_CPPFLAGS = \ -I$(MYSQL_INCLUDES) \ -I$(MYSQL_INCLUDES2) \ -I$(MYSQL_INCLUDES3) \ - -include $(top_srcdir)/config.h + -include $(top_builddir)/config.h # this is useful as a dependency to make sure librsaeuro gets compiled before @@ -172,6 +172,10 @@ LIBRSA = $(top_builddir)/RSAEuro/source/librsaeuro.a bin_PROGRAMS = create_work add country_select +EXTRA_DIST = make_project.py + + +# TODO: use libboinc for these: create_work_SOURCES = \ create_work.C \ backend_lib.C \ diff --git a/tools/make_project b/tools/make_project new file mode 100755 index 0000000000..e7c605a592 --- /dev/null +++ b/tools/make_project @@ -0,0 +1,159 @@ +#!/usr/bin/env python + +# $Id$ +# Creates a new BOINC project. + +import sys, os, getopt +sys.path.append('../py') +from version import * +from boinc import * + +argv0 = sys.argv[0] +HOME = os.path.expanduser('~') + +HELP = """ +syntax: %(argv0)s [options] project-dir-name 'Project Long Name'] + +Creates a new project with given name with everything running on a single +server. + +Misc options: + --verbose={0,1,2} default: 1 + -v alias for --verbose=2 + --no_query accept all directories without querying + +Dir-options: + --base default: $HOME (%(HOME)s) + --key_dir default: BASE/keys + --project_root default: BASE/projects/PROJECT + --url_base REQUIRED; e.g.: http://maggie.ssl.berkeley.edu/ + + --html_user_url default: URL_BASE/PROJECT/ + --html_ops_url default: URL_BASE/PROJECT_ops/ + --cgi_url default: URL_BASE/PROJECT_cgi/ + +Project-specific directory structure that you probably don't need to change: + + --bin_dir default: PROJECT_ROOT/bin + --cgi_bin_dir default: PROJECT_ROOT/cgi-bin + --html_user_dir default: PROJECT_ROOT/html_user + --html_ops_dir default: PROJECT_ROOT/html_ops + --download_dir default: PROJECT_ROOT/download + --upload_dir default: PROJECT_ROOT/upload + --log_dir default: PROJECT_ROOT/log + --pid_dir default: PROJECT_ROOT/pid + +E.g. if you run make_project --base $HOME/boinc --url_base http://boink/ yah 'YETI @ Home' + +Then upload_dir = $HOME/boinc/projects/yah/upload +and cgi_url = http://boink/yah_cgi/ + +By default, directory options will be queried if they do not exist yet. 
+ +""" %locals() + +def syntax_error(): + raise SystemExit('See "%s --help" for help\n' % sys.argv[0]) + +def usage(): + print HELP + raise SystemExit + +try: + opts, args = getopt.getopt(sys.argv[1:], + 'hv', + [ 'help', + 'verbose=', + 'no_query', + 'base=', + 'key_dir=', + 'project_root=', + 'url_base=', + 'html_user_url=', + 'html_ops_url=', + 'cgi_url=', + 'bin_dir=', + 'cgi_bin_dir=', + 'html_user_dir=', + 'html_ops_dir=', + 'download_dir=', + 'upload_dir=', + 'log_dir=', + 'pid_dir=' ]) +except getopt.GetoptError: + syntax_error() + +options.url_base = None + +for o,a in opts: + if o == '-h' or o == '--help': usage() + elif o == 'v': options.echo_verbose = 2 + elif o == 'verbose': options.echo_verbose = int(a) + elif o == 'no_query': options.no_query = True + elif o == 'base': options.base = a + elif o == 'key_dir': options.key_dir = a + elif o == 'project_root': options.project_root = a + elif o == 'url_base': options.url_base = a + elif o == 'html_user_url': options.html_user_url = a + elif o == 'html_ops_url': options.html_ops_url = a + elif o == 'cgi_url': options.cgi_url = a + elif o == 'bin_dir': options.bin_dir = a + elif o == 'cgi_bin_dir': options.cgi_bin_dir = a + elif o == 'html_user_dir': options.html_user_dir = a + elif o == 'html_ops_dir': options.html_ops_dir = a + elif o == 'download_dir': options.download_dir = a + elif o == 'upload_dir': options.upload_dir = a + elif o == 'log_dir': options.log_dir = a + elif o == 'pid_dir': options.pid_dir = a + else: + raise SystemExit('internal error') + +if len(args) != 2 or not options.url_base: + syntax_error() + +(project_shortname, project_longname) = args + + + + + +# if BLAH: +# print """I created the directories BLAH and BLAH. You need to add this (or similar) to your Apache config.httpd: + +# Alias /%(project)s %(BOINC_HTML_USER_DIR)s +# ScriptAlias /%(project)s_cgi %(BOINC_CGI_BIN_DIR)s + + + +# # Note: projects/*/keys/ should NOT be readable! + +# Options Indexes FollowSymlinks MultiViews +# AllowOverride None +# Order allow,deny +# Allow from all +# + +# +# Options FollowSymLinks +# AllowOverride AuthConfig +# Order allow,deny +# Allow from all +# + + +## TODO: first time: by default query all options with raw_input() ; use --no-query to use only environ vars + + +## TODO: save settings to a file. + + +# #options.* : +# KEY_DIR = get_env_var("BOINC_TEST_KEY_DIR") +# PROJECTS_DIR = get_env_var("BOINC_TEST_PROJECTS_DIR") +# CGI_DIR = get_env_var("BOINC_TEST_CGI_DIR") +# HTML_DIR = get_env_var("BOINC_TEST_HTML_DIR") +# HOSTS_DIR = get_env_var("BOINC_TEST_HOSTS_DIR") +# CGI_URL = get_env_var("BOINC_TEST_CGI_URL") +# HTML_URL = get_env_var("BOINC_TEST_HTML_URL") +# m = re.compile('http://[^/]+:(\d+)/').match(HTML_URL) +# PORT = m and m.group(1) or 80
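
For orientation, a test script written against the new test/testbase.py follows the same pattern as test_uc.py and test_concat.py above: subclass Work and TestProject, give the project an appname, and let run_check_all() drive everything. The sketch below is illustrative only — 'example_app', the template file names, and the correct-output file are placeholders, not files added by this patch — and project installation/startup is assumed to be handled by Project.run() in py/boinc.py, which is not shown in this excerpt.

    #!/usr/bin/env python
    # Minimal sketch of a test script built on testbase.py (names are illustrative).
    from testbase import *

    class WorkExample(Work):
        def __init__(self, redundancy=2):
            Work.__init__(self, redundancy=redundancy)
            self.wu_template = "example_wu"          # hypothetical WU/result template files
            self.result_template = "example_result"
            self.input_files = ['input']

    class ProjectExample(TestProject):
        def __init__(self, works=None, users=None, hosts=None, redundancy=2):
            TestProject.__init__(self,
                                 appname = 'example_app',   # must name an app built under apps/
                                 works = works or [WorkExample(redundancy=redundancy)],
                                 users = users,
                                 hosts = hosts,
                                 redundancy = redundancy)
        def check(self):
            # Run the validator once over the uploaded results, then compare
            # each uploaded file against a known-good output (names illustrative).
            self.sched_run('validate_test')
            self.check_files_match('upload/example_wu_%d_0', 'example_correct_output',
                                   count=self.redundancy)

    if __name__ == '__main__':
        test_msg("example application")   # prints the test banner and calls test_init()
        ProjectExample()                  # constructor registers the project with all_projects
        run_check_all()                   # run projects and hosts, check results, stop servers

Such a script would be run from the test/ directory after configure has generated py/version.py, since testbase.py imports version from ../py and test_init() chdirs into TOP_SRC_DIR/test when needed.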