diff --git a/checkin_notes b/checkin_notes
index 224a69e342..c0e06fc318 100644
--- a/checkin_notes
+++ b/checkin_notes
@@ -6349,3 +6349,11 @@ David 21 Sept 2011
         schema.sql
     py/Boinc/
         setup_project.py
+
+David 21 Sept 2011
+    - client: more stuff for replicated trickle ups
+
+    client/
+        cs_trickle.cpp
+        client_types.cpp
+        http_curl.cpp
diff --git a/client/client_types.cpp b/client/client_types.cpp
index ab6659d141..4f08b1a1d7 100644
--- a/client/client_types.cpp
+++ b/client/client_types.cpp
@@ -178,7 +178,7 @@ static bool parse_rsc_param(XML_PARSER& xp, const char* end_tag, int& rsc_type,
//
int PROJECT::parse_state(XML_PARSER& xp) {
     char buf[256];
-    std::string sched_url;
+    std::string sched_url, stemp;
     string str1, str2;
     int retval, rt;
     double x;
@@ -312,6 +312,9 @@ int PROJECT::parse_state(XML_PARSER& xp) {
         if (xp.parse_bool("scheduler_rpc_in_progress", btemp)) continue;
         if (xp.parse_bool("use_symlinks", use_symlinks)) continue;
         if (xp.parse_bool("anonymous_platform", btemp)) continue;
+        if (xp.parse_string("trickle_up_url", stemp)) {
+            trickle_up_ops.push_back(new TRICKLE_UP_OP(stemp));
+        }
         if (log_flags.unparsed_xml) {
             msg_printf(0, MSG_INFO,
                 "[unparsed_xml] PROJECT::parse_state(): unrecognized: %s",
@@ -477,6 +480,13 @@ int PROJECT::write_state(MIOFILE& out, bool gui_rpc) {
                 "    <code_sign_key>\n%s\n</code_sign_key>\n", code_sign_key
             );
         }
+        for (i=0; i<trickle_up_ops.size(); i++) {
+            TRICKLE_UP_OP* t = trickle_up_ops[i];
+            out.printf(
+                "    <trickle_up_url>%s</trickle_up_url>\n",
+                t->url.c_str()
+            );
+        }
     }
     out.printf(
         "</project>\n"
diff --git a/client/cs_trickle.cpp b/client/cs_trickle.cpp
index bc480e0d1b..475a086a48 100644
--- a/client/cs_trickle.cpp
+++ b/client/cs_trickle.cpp
@@ -171,11 +171,13 @@ void send_replicated_trickles(PROJECT* p, string& msg) {
 }
 
 // A scheduler reply gave us a list of trickle handler URLs.
-// If this is different than the list we currently have, replace it.
+// Add and remove as needed.
 //
 void update_trickle_up_urls(PROJECT* p, vector<string> &urls) {
     unsigned int i, j;
-    bool lists_equal = true;
+
+    // add new URLs
+    //
     for (i=0; i<urls.size(); i++) {
         string& url = urls[i];
         bool found = false;
@@ -187,35 +189,30 @@ void update_trickle_up_urls(PROJECT* p, vector<string> &urls) {
             }
         }
         if (!found) {
-            lists_equal = false;
+            p->trickle_up_ops.push_back(new TRICKLE_UP_OP(url));
             break;
         }
     }
-    if (lists_equal) {
-        for (j=0; j<p->trickle_up_ops.size(); j++) {
-            TRICKLE_UP_OP *t = p->trickle_up_ops[j];
-            bool found = false;
-            for (i=0; i<urls.size(); i++) {
-                string& url = urls[i];
-                if (t->url == url) {
-                    found = true;
-                    break;
-                }
-            }
-            if (!found) {
-                lists_equal = false;
+
+    // remove old URLs
+    //
+    vector<TRICKLE_UP_OP*>::iterator iter = p->trickle_up_ops.begin();
+    while (iter != p->trickle_up_ops.end()) {
+        TRICKLE_UP_OP *t = *iter;
+        bool found = false;
+        for (i=0; i<urls.size(); i++) {
+            string& url = urls[i];
+            if (t->url == url) {
+                found = true;
                 break;
             }
         }
-    }
-    if (lists_equal) return;
-    for (j=0; j<p->trickle_up_ops.size(); j++) {
-        TRICKLE_UP_OP *t = p->trickle_up_ops[j];
-        delete t;
-    }
-    p->trickle_up_ops.clear();
-    for (i=0; i<urls.size(); i++) {
-        string& url = urls[i];
-        p->trickle_up_ops.push_back(new TRICKLE_UP_OP(url));
+        if (!found) {
+            gstate.http_ops->remove(&(t->gui_http->http_op));
+            delete t;
+            iter = p->trickle_up_ops.erase(iter);
+        } else {
+            iter++;
+        }
     }
 }
diff --git a/client/http_curl.cpp b/client/http_curl.cpp
index 015b39ae82..80e8aad675 100644
--- a/client/http_curl.cpp
+++ b/client/http_curl.cpp
@@ -740,7 +740,6 @@ int HTTP_OP_SET::remove(HTTP_OP* p) {
         }
         iter++;
     }
-    msg_printf(NULL, MSG_INTERNAL_ERROR, "HTTP operation not found");
     return ERR_NOT_FOUND;
 }
 