← Back to team overview

txaws-dev team mailing list archive

[Merge] lp:~djfroofy/txaws/86392-s3copyobj into lp:txaws

 

Drew Smathers has proposed merging lp:~djfroofy/txaws/86392-s3copyobj into lp:txaws.

Requested reviews:
  txAWS Developers (txaws-dev)
Related bugs:
  #486392 Add missing copy object S3 API call
  https://bugs.launchpad.net/bugs/486392

-- 
https://code.launchpad.net/~djfroofy/txaws/86392-s3copyobj/+merge/35882
Your team txAWS Developers is requested to review the proposed merge of lp:~djfroofy/txaws/86392-s3copyobj into lp:txaws.
=== modified file 'txaws/s3/client.py'
--- txaws/s3/client.py	2009-11-23 00:55:44 +0000
+++ txaws/s3/client.py	2010-09-17 20:56:46 +0000
@@ -178,7 +178,7 @@
             common_prefixes)
 
     def put_object(self, bucket, object_name, data, content_type=None,
-                   metadata={}):
+                   metadata={}, amz_headers={}):
         """
         Put an object in a bucket.
 
@@ -187,8 +187,27 @@
         query = self.query_factory(
             action="PUT", creds=self.creds, endpoint=self.endpoint,
             bucket=bucket, object_name=object_name, data=data,
-            content_type=content_type, metadata=metadata)
-        return query.submit()
+            content_type=content_type, metadata=metadata, amz_headers=amz_headers)
+        return query.submit()
+
+
+    def copy_object(self, src_bucket, src_object_name, dest_bucket=None, dest_object_name=None,
+                    metadata={}, amz_headers={}):
+        """
+        Copy a source object stored in S3 to a destination bucket. If the destination
+        bucket is not given, it is assumed to be the same as the source bucket; the same
+        applies to the destination object name. The x-amz-copy-source header is derived
+        from the source bucket and object name; additional x-amz-* headers can be passed in the amz_headers dict.
+        """
+        dest_bucket = dest_bucket or src_bucket
+        dest_object_name = dest_object_name or src_object_name
+        amz_headers['copy-source'] = '/%s/%s' % (src_bucket, src_object_name)
+        query = self.query_factory(
+            action="PUT", creds=self.creds, endpoint=self.endpoint,
+            bucket=dest_bucket, object_name=dest_object_name,
+            metadata=metadata, amz_headers=amz_headers)
+        return query.submit()
+        
 
     def get_object(self, bucket, object_name):
         """
@@ -225,13 +244,14 @@
     """A query for submission to the S3 service."""
 
     def __init__(self, bucket=None, object_name=None, data="",
-                 content_type=None, metadata={}, *args, **kwargs):
+                 content_type=None, metadata={}, amz_headers={}, *args, **kwargs):
         super(Query, self).__init__(*args, **kwargs)
         self.bucket = bucket
         self.object_name = object_name
         self.data = data
         self.content_type = content_type
         self.metadata = metadata
+        self.amz_headers = amz_headers
         self.date = datetimeToString()
         if not self.endpoint or not self.endpoint.host:
             self.endpoint = AWSServiceEndpoint(S3_ENDPOINT)
@@ -257,6 +277,8 @@
                    "Date": self.date}
         for key, value in self.metadata.iteritems():
             headers["x-amz-meta-" + key] = value
+        for key, value in self.amz_headers.iteritems():
+            headers["x-amz-" + key] = value
         # Before we check if the content type is set, let's see if we can set
         # it by guessing the the mimetype.
         self.set_content_type()

=== modified file 'txaws/s3/tests/test_client.py'
--- txaws/s3/tests/test_client.py	2009-11-23 00:55:44 +0000
+++ txaws/s3/tests/test_client.py	2010-09-17 20:56:46 +0000
@@ -202,11 +202,12 @@
 
             def __init__(query, action, creds, endpoint, bucket=None,
                 object_name=None, data=None, content_type=None,
-                metadata=None):
+                metadata=None, amz_headers=None):
                 super(StubQuery, query).__init__(
                     action=action, creds=creds, bucket=bucket,
                     object_name=object_name, data=data,
-                    content_type=content_type, metadata=metadata)
+                    content_type=content_type, metadata=metadata,
+                    amz_headers=amz_headers)
                 self.assertEqual(action, "PUT")
                 self.assertEqual(creds.access_key, "foo")
                 self.assertEqual(creds.secret_key, "bar")
@@ -215,6 +216,7 @@
                 self.assertEqual(query.data, "some data")
                 self.assertEqual(query.content_type, "text/plain")
                 self.assertEqual(query.metadata, {"key": "some meta data"})
+                self.assertEqual(query.amz_headers, {"acl": "public-read"})
 
             def submit(query):
                 return succeed(None)
@@ -223,6 +225,37 @@
         s3 = client.S3Client(creds, query_factory=StubQuery)
         return s3.put_object(
             "mybucket", "objectname", "some data", content_type="text/plain",
+            metadata={"key": "some meta data"}, amz_headers={'acl':'public-read'})
+
+    def test_copy_object(self):
+
+        class StubQuery(client.Query):
+
+            def __init__(query, action, creds, endpoint, bucket=None,
+                object_name=None, data=None, content_type=None,
+                metadata=None, amz_headers=None):
+                super(StubQuery, query).__init__(
+                    action=action, creds=creds, bucket=bucket,
+                    object_name=object_name, data=data,
+                    content_type=content_type, metadata=metadata,
+                    amz_headers=amz_headers)
+                self.assertEqual(action, "PUT")
+                self.assertEqual(creds.access_key, "foo")
+                self.assertEqual(creds.secret_key, "bar")
+                self.assertEqual(query.bucket, "newbucket")
+                self.assertEqual(query.object_name, "newobjectname")
+                self.assertEqual(query.data, None)
+                self.assertEqual(query.content_type, None)
+                self.assertEqual(query.metadata, {"key": "some meta data"})
+                self.assertEqual(query.amz_headers, {"copy-source": "/mybucket/objectname"})
+
+            def submit(query):
+                return succeed(None)
+
+        creds = AWSCredentials("foo", "bar")
+        s3 = client.S3Client(creds, query_factory=StubQuery)
+        return s3.copy_object(
+            "mybucket", "objectname", "newbucket", "newobjectname",
             metadata={"key": "some meta data"})
 
     def test_get_object(self):
@@ -231,7 +264,7 @@
 
             def __init__(query, action, creds, endpoint, bucket=None,
                 object_name=None, data=None, content_type=None,
-                metadata=None):
+                metadata=None, amz_headers=None):
                 super(StubQuery, query).__init__(
                     action=action, creds=creds, bucket=bucket,
                     object_name=object_name, data=data,
@@ -400,7 +433,7 @@
         request = client.Query(
             action="PUT", bucket="somebucket", object_name="object/name/here",
             data=DATA, content_type="text/plain", metadata={"foo": "bar"},
-            creds=self.creds, endpoint=self.endpoint)
+            amz_headers={"acl":"public-read"}, creds=self.creds, endpoint=self.endpoint)
         request.sign = lambda headers: "TESTINGSIG="
         self.assertEqual(request.action, "PUT")
         headers = request.get_headers()
@@ -411,7 +444,8 @@
                 "Content-Type": "text/plain",
                 "Content-Length": len(DATA),
                 "Content-MD5": DIGEST,
-                "x-amz-meta-foo": "bar"})
+                "x-amz-meta-foo": "bar",
+                "x-amz-acl": "public-read"})
         self.assertEqual(request.data, "objectData")
 
     def test_bucket_query(self):


Follow ups