author    Robin H. Johnson <robbat2@gentoo.org>  2008-09-18 04:13:38 +0000
committer Robin H. Johnson <robbat2@gentoo.org>  2008-09-18 04:13:38 +0000
commit    5ffc00a7a87287cb7c0eca35fe3d77583500d7f0 (patch)
tree      267adb4278bb410ebee48d6ee1177726765f4115 /app-backup/amanda/files
parent    ppc64 for swig-1.3.34 and ppc for swig-1.3.36 (diff)
download  gentoo-2-5ffc00a7a87287cb7c0eca35fe3d77583500d7f0.tar.gz
          gentoo-2-5ffc00a7a87287cb7c0eca35fe3d77583500d7f0.tar.bz2
          gentoo-2-5ffc00a7a87287cb7c0eca35fe3d77583500d7f0.zip

Ready for prime-time, with S3 patches tested by Lisa Seelye <lisa@thedoh.com> (former Gentoo dev :-).
(Portage version: 2.2_rc8/cvs/Linux 2.6.27-rc1-10246-gca5de40 x86_64)
Diffstat (limited to 'app-backup/amanda/files')
-rw-r--r--  app-backup/amanda/files/s3-list-keys.diff | 61
-rw-r--r--  app-backup/amanda/files/s3.c.part2.diff   | 15
2 files changed, 76 insertions(+), 0 deletions(-)
diff --git a/app-backup/amanda/files/s3-list-keys.diff b/app-backup/amanda/files/s3-list-keys.diff
new file mode 100644
index 000000000000..c75da2623f4d
--- /dev/null
+++ b/app-backup/amanda/files/s3-list-keys.diff
@@ -0,0 +1,61 @@
+diff --git a/device-src/s3.c b/device-src/s3.c
+index dfd29dc..7a3b7d3 100644
+--- a/device-src/s3.c
++++ b/device-src/s3.c
+@@ -605,8 +605,10 @@ buffer_writefunction(void *ptr, size_t size, size_t nmemb, void *stream)
+ guint bytes_needed = data->buffer_pos + new_bytes;
+
+ /* error out if the new size is greater than the maximum allowed */
+- if (data->max_buffer_size && bytes_needed > data->max_buffer_size)
+- return 0;
++ if (data->max_buffer_size && bytes_needed > data->max_buffer_size) {
++ g_warning("S3 request exceeded %d bytes; CURL error follows.", data->max_buffer_size);
++ return 0; /* returning zero signals an error to libcurl */
++ }
+
+ /* reallocate if necessary. We use exponential sizing to make this
+ * happen less often. */
+@@ -618,7 +620,7 @@ buffer_writefunction(void *ptr, size_t size, size_t nmemb, void *stream)
+ data->buffer = g_realloc(data->buffer, new_size);
+ data->buffer_len = new_size;
+ }
+- g_return_val_if_fail(data->buffer, 0); /* returning zero signals an error to libcurl */
++ g_return_val_if_fail(data->buffer, 0);
+
+ /* actually copy the data to the buffer */
+ memcpy(data->buffer + data->buffer_pos, ptr, new_bytes);
+@@ -1186,8 +1188,7 @@ list_fetch(S3Handle *hdl,
+ const char *resource,
+ const char *prefix,
+ const char *delimiter,
+- const char *marker,
+- const char *max_keys)
++ const char *marker)
+ {
+ char *urldelim = "?";
+ char *uri = g_strdup(resource);
+@@ -1212,13 +1213,13 @@ list_fetch(S3Handle *hdl,
+ urldelim = "&";
+ }
+ if (max_keys) {
+- if (!list_build_url_component(&uri, urldelim, "max-keys", max_keys)) goto cleanup;
++ if (!list_build_url_component(&uri, urldelim, "max-keys", "1024")) goto cleanup;
+ urldelim = "&";
+ }
+
+ /* and perform the request on that URI */
+ result = perform_request(hdl, resource, uri, "GET", NULL,
+- 0, MAX_ERROR_RESPONSE_LEN, 0, result_handling);
++ 0, 0, 0, result_handling);
+
+ cleanup:
+ if (uri) g_free(uri);
+@@ -1251,7 +1252,7 @@ s3_list_keys(S3Handle *hdl,
+ /* Loop until S3 has given us the entire picture */
+ do {
+ /* get some data from S3 */
+- result = list_fetch(hdl, resource, prefix, delimiter, thunk.next_marker, NULL);
++ result = list_fetch(hdl, resource, prefix, delimiter, thunk.next_marker);
+ if (result != S3_RESULT_OK) goto cleanup;
+
+ /* run the parser over it */
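
The buffer_writefunction() hunk above relies on libcurl's write-callback contract: returning a byte count different from size * nmemb (here, 0) makes libcurl abort the transfer and report CURLE_WRITE_ERROR. A minimal standalone sketch of that convention follows; it is illustrative only, and the capped-buffer type (write_buf) and its 1 MiB limit are assumptions, not part of the Amanda source.

#include <curl/curl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical capped accumulation buffer, mirroring the idea in
 * buffer_writefunction(): grow as data arrives, but refuse to grow
 * past a caller-imposed maximum. */
struct write_buf {
    char *data;
    size_t len;   /* bytes used so far */
    size_t max;   /* 0 means "no limit" */
};

static size_t
write_cb(void *ptr, size_t size, size_t nmemb, void *userdata)
{
    struct write_buf *buf = userdata;
    size_t new_bytes = size * nmemb;
    char *grown;

    /* Returning anything other than size * nmemb tells libcurl the write
     * failed; the transfer is aborted and ends with CURLE_WRITE_ERROR. */
    if (buf->max && buf->len + new_bytes > buf->max) {
        fprintf(stderr, "response exceeded %zu bytes\n", buf->max);
        return 0;
    }

    grown = realloc(buf->data, buf->len + new_bytes);
    if (!grown)
        return 0; /* allocation failure is also reported as a write error */
    buf->data = grown;

    memcpy(buf->data + buf->len, ptr, new_bytes);
    buf->len += new_bytes;
    return new_bytes; /* success: every byte was consumed */
}

int main(void)
{
    struct write_buf buf = { NULL, 0, 1024 * 1024 }; /* cap at 1 MiB */
    CURL *curl = curl_easy_init();
    CURLcode rc;

    if (!curl)
        return 1;
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &buf);
    rc = curl_easy_perform(curl);
    if (rc != CURLE_OK)
        fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(rc));

    free(buf.data);
    curl_easy_cleanup(curl);
    return rc == CURLE_OK ? 0 : 1;
}
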
diff --git a/app-backup/amanda/files/s3.c.part2.diff b/app-backup/amanda/files/s3.c.part2.diff
new file mode 100644
index 000000000000..2141f922747e
--- /dev/null
+++ b/app-backup/amanda/files/s3.c.part2.diff
@@ -0,0 +1,15 @@
+--- s3.c.orig 2008-09-12 13:20:26.000000000 -0400
++++ s3.c 2008-09-12 13:20:36.000000000 -0400
+@@ -1212,10 +1212,10 @@
+ if (!list_build_url_component(&uri, urldelim, "marker", marker)) goto cleanup;
+ urldelim = "&";
+ }
+- if (max_keys) {
++ // if (max_keys) {
+ if (!list_build_url_component(&uri, urldelim, "max-keys", "1024")) goto cleanup;
+ urldelim = "&";
+- }
++ // }
+
+ /* and perform the request on that URI */
+ result = perform_request(hdl, resource, uri, "GET", NULL,
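
Taken together, the two patches make list_fetch() issue S3's GET Bucket (list objects) request with max-keys pinned to "1024", while s3_list_keys() keeps paging by passing the marker from the previous truncated response. A rough sketch of the kind of URI the patched code builds is below; the append_component() helper and the bucket/key names are hypothetical stand-ins, not Amanda's list_build_url_component() (which also handles URL escaping).

#include <glib.h>
#include <stdio.h>

/* Illustrative only: append one "key=value" query component, switching the
 * separator from '?' to '&' after the first component, the same pattern
 * list_fetch() follows with its urldelim variable. */
static void
append_component(GString *uri, const char **delim, const char *key, const char *value)
{
    if (!value)
        return;
    g_string_append_printf(uri, "%s%s=%s", *delim, key, value);
    *delim = "&";
}

int main(void)
{
    const char *delim = "?";
    GString *uri = g_string_new("/my-bucket");   /* hypothetical bucket resource */

    append_component(uri, &delim, "prefix",    "amanda-slot-");
    append_component(uri, &delim, "delimiter", "/");
    append_component(uri, &delim, "marker",    "amanda-slot-0042"); /* resume point from the last truncated listing */
    append_component(uri, &delim, "max-keys",  "1024");             /* value the patch hardcodes */

    /* e.g. GET /my-bucket?prefix=amanda-slot-&delimiter=/&marker=amanda-slot-0042&max-keys=1024 */
    printf("GET %s\n", uri->str);
    g_string_free(uri, TRUE);
    return 0;
}
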