From 349f27a39ec66e032aa4260671ec5a8c2946a71c Mon Sep 17 00:00:00 2001
From: Bruce Allen
Date: Mon, 4 Aug 2008 23:30:37 +0000
Subject: [PATCH] scheduler: in resending lost results, must copy core client
 version.

David -- this can probably be done better and might indicate that some other
info is not being properly copied. Please review and fix if needed. Before
this fix, scheduler was complaining about outdated core clients
version 0 < min needed core 413.

tools for scheduler logging: change cgi.log.X to scheduler.log.X in script
that creates on-line logs for users to read to help diagnose problems.

svn path=/trunk/boinc/; revision=15751
---
 checkin_notes          | 19 +++++++++++++++++++
 sched/handle_request.C |  1 +
 sched/sched_send.C     |  4 ++++
 tools/makelog.sh       | 14 +++++++-------
 4 files changed, 31 insertions(+), 7 deletions(-)

diff --git a/checkin_notes b/checkin_notes
index 049e2c5eee..9ee796b8cb 100644
--- a/checkin_notes
+++ b/checkin_notes
@@ -6117,3 +6117,22 @@ David 4 Aug 2008
 
     sched/
         sched_send.C,h
+
+Bruce 5 Aug 2008
+    - scheduler: in resending lost results, must copy core
+      client version. David -- this can probably be done
+      better and might indicate that some other info is not
+      being properly copied. Please review and fix if needed.
+      Before this fix, scheduler.log was complaining about
+      outdated core clients version 0 < min needed core 413.
+
+    - tools for scheduler logging: change cgi.log.X to scheduler.log.X
+      in script that creates on-line logs for users to read
+      to help diagnose problems.
+
+    tools/
+        makelog.sh
+    sched/
+        sched_send.C
+        handle_request.C
+
diff --git a/sched/handle_request.C b/sched/handle_request.C
index 6a7bd6665e..02cf2cb340 100644
--- a/sched/handle_request.C
+++ b/sched/handle_request.C
@@ -1324,6 +1324,7 @@ void process_request(
     reply.wreq.nresults_on_host = sreq.other_results.size();
     if (sreq.have_other_results_list) {
         if (config.resend_lost_results) {
+            reply.wreq.core_client_version = 100*sreq.core_client_major_version + sreq.core_client_minor_version;
             if (resend_lost_work(sreq, reply)) {
                 ok_to_send_work = false;
             }
diff --git a/sched/sched_send.C b/sched/sched_send.C
index e97475ab61..7b26f2cf0a 100644
--- a/sched/sched_send.C
+++ b/sched/sched_send.C
@@ -174,6 +174,10 @@ BEST_APP_VERSION* get_app_version(
         if (av.appid != wu.appid) continue;
         if (av.platformid != p->id) continue;
         if (reply.wreq.core_client_version < av.min_core_version) {
+            log_messages.printf(MSG_DEBUG,
+                "outdated client version %d < min core version %d\n",
+                reply.wreq.core_client_version, av.min_core_version
+            );
             reply.wreq.outdated_core = true;
             continue;
         }
diff --git a/tools/makelog.sh b/tools/makelog.sh
index 9521f4cf3b..5f544c4a5b 100755
--- a/tools/makelog.sh
+++ b/tools/makelog.sh
@@ -44,17 +44,17 @@ while [ true ] ; do
     echo " " >> $filepath
     echo " " >> $filepath
 
-# determine which files to search. Normally we just search cgi.log, but if the timestamp of cgi.log
-# and cgi.log.0 are within 300 seconds of each other, then we search both. The 10,000,000 case will
+# determine which files to search. Normally we just search scheduler.log, but if the timestamp of scheduler.log
+# and scheduler.log.0 are within 600 seconds of each other, then we search both. The 10,000,000 case will
 # handle what happens if either $cgilogtime or $cgilog0time is not properly defined.
 
-    export cgilog0time=`ls --time-style=+%s -l ../log_*/cgi.log.0 | awk '{print $6}'`
+    export cgilog0time=`ls --time-style=+%s -l ../log_*/scheduler.log.0 | awk '{print $6}'`
     export cgilogtime=$cgilog0time
-    export cgilogtime=`ls --time-style=+%s -l ../log_*/cgi.log | awk '{print $6}'`
+    export cgilogtime=`ls --time-style=+%s -l ../log_*/scheduler.log | awk '{print $6}'`
     export deltatime=$(($cgilogtime-$cgilog0time))
-    export filelist="../log_*/cgi.log"
-    if [ $deltatime -lt 300 ] || [ $deltatime -gt 10000000 ] ; then
-        export filelist="../log_*/cgi.log ../log_*/cgi.log.0"
+    export filelist="../log_*/scheduler.log"
+    if [ $deltatime -lt 600 ] || [ $deltatime -gt 10000000 ] ; then
+        export filelist="../log_*/scheduler.log ../log_*/scheduler.log.0"
     fi
 
 # now grep for all log entries from 3 minutes ago. Use sed to hide any sensitive info
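A minimal standalone C++ sketch, not taken from the BOINC sources, of the version-encoding
convention the fix relies on: the scheduler compares a single integer, 100*major + minor,
against an app version's min_core_version, and on the resend-lost-results path that integer
was previously left at 0, which is why every such request produced the
"version 0 < min needed core 413" complaint. The helper name encode_core_client_version
below is hypothetical, used only for illustration.

    #include <cstdio>

    // Hypothetical helper mirroring the expression added in handle_request.C:
    // the client version is packed into one int as 100*major + minor.
    int encode_core_client_version(int major, int minor) {
        return 100 * major + minor;
    }

    int main() {
        const int min_core_version = 413; // an app version requiring client 4.13, as in the log message

        // Pre-fix behavior on the resend-lost-results path: the field stays 0,
        // so every app version appears to require a newer client.
        int uninitialized = 0;
        // Post-fix behavior: the value is copied from the request, e.g. a 5.10 client.
        int copied = encode_core_client_version(5, 10);

        std::printf("%d < %d -> %s\n", uninitialized, min_core_version,
                    uninitialized < min_core_version ? "outdated_core" : "ok");
        std::printf("%d < %d -> %s\n", copied, min_core_version,
                    copied < min_core_version ? "outdated_core" : "ok");
        return 0;
    }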