14 changes: 6 additions & 8 deletions docs/_components/storage-getting-started.rst
@@ -91,11 +91,10 @@ Python built-in ``object``.
If you want to set some data, you just create a ``Blob`` inside your bucket
and store your data inside the blob::

>>> blob = bucket.new_blob('greeting.txt')
>>> blob = storage.Blob('greeting.txt', bucket=bucket)
>>> blob.upload_from_string('Hello world!')

:func:`new_blob <gcloud.storage.bucket.Bucket.new_blob>` creates a
:class:`Blob <gcloud.storage.blob.Blob>` object locally and
This creates a :class:`Blob <gcloud.storage.blob.Blob>` object locally and
:func:`upload_from_string <gcloud.storage.blob.Blob.upload_from_string>`
allows you to put a string into the blob.

@@ -117,8 +116,7 @@ Then you can look at the file in a terminal::
And what about when you're not dealing with text?
That's pretty simple too::

>>> blob = bucket.new_blob('kitten.jpg')
>>> blob.upload_from_filename('kitten.jpg')
>>> blob = bucket.upload_file('kitten.jpg')

And to test whether it worked?

@@ -136,9 +134,9 @@ remotely. If the blob doesn't exist, it will return ``None``.
.. note:: ``get_blob`` is **not** retrieving the entire object's data.

If you want to "get-or-create" the blob (that is, overwrite it if it
already exists), you can use :func:`new_blob
<gcloud.storage.bucket.Bucket.new_blob>`. However, keep in mind, the blob
is not created until you store some data inside of it.
already exists), you can just construct a :class:`gcloud.storage.blob.Blob`
locally and update it. However, keep in mind, the blob is not created
remotely until you store some data inside of it.

If you want to check whether a blob exists, you can use the ``in`` operator
in Python::
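Pulling the snippets above together, a minimal sketch of the constructor-based flow these docs now describe; the bucket and file names are made up, and it assumes defaults are configured as in ``docs/index.rst``::

>>> from gcloud import storage
>>> storage.set_defaults()
>>> bucket = storage.get_bucket('my-example-bucket')
>>> # Constructing a Blob is purely local -- nothing exists remotely yet.
>>> blob = storage.Blob('greeting.txt', bucket=bucket)
>>> blob.upload_from_string('Hello world!')   # the object is created here
>>> # Binary files can go through the upload_file shortcut.
>>> blob = bucket.upload_file('kitten.jpg')
>>> # get_blob returns None (without downloading data) for missing objects.
>>> print bucket.get_blob('does-not-exist.txt')
None
>>> # Existence check via the ``in`` operator mentioned above.
>>> 'greeting.txt' in bucket
True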
2 changes: 1 addition & 1 deletion docs/_components/storage-quickstart.rst
@@ -61,7 +61,7 @@ you can create buckets and blobs::
>>> bucket = storage.create_bucket('my-new-bucket', connection=connection)
>>> print bucket
<Bucket: my-new-bucket>
>>> blob = bucket.new_blob('my-test-file.txt')
>>> blob = storage.Blob('my-test-file.txt', bucket=bucket)
>>> print blob
<Blob: my-new-bucket, my-test-file.txt>
>>> blob = blob.upload_from_string('this is test content!')
2 changes: 1 addition & 1 deletion docs/index.rst
@@ -49,5 +49,5 @@ Cloud Storage
from gcloud import storage
storage.set_defaults()
bucket = storage.get_bucket('<your-bucket-name>')
blob = bucket.new_blob('my-test-file.txt')
blob = storage.Blob('my-test-file.txt', bucket=bucket)
blob = blob.upload_contents_from_string('this is test content!')
2 changes: 1 addition & 1 deletion gcloud/storage/_helpers.py
@@ -94,7 +94,7 @@ def _reload_properties(self):
:returns: The object you just reloaded data for.
"""
# Pass only '?projection=noAcl' here because 'acl' and related
# are handled via custom endpoints..
# are handled via custom endpoints.
query_params = {'projection': 'noAcl'}
self._properties = self.connection.api_request(
method='GET', path=self.path, query_params=query_params)
21 changes: 18 additions & 3 deletions gcloud/storage/blob.py
@@ -74,6 +74,21 @@ def __init__(self, name, bucket=None, properties=None):

self.bucket = bucket

@staticmethod
def path_helper(bucket_path, blob_name):
"""Relative URL path for a blob.

:type bucket_path: string
:param bucket_path: The URL path for a bucket.

:type blob_name: string
:param blob_name: The name of the blob.

:rtype: string
:returns: The relative URL path for ``blob_name``.
"""
return bucket_path + '/o/' + quote(blob_name, safe='')
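As a quick illustration (made-up values), the new ``path_helper`` is pure string composition, so it can be exercised without a connection; note that ``quote(..., safe='')`` escapes slashes inside the blob name as well::

>>> from gcloud.storage.blob import Blob
>>> Blob.path_helper('/b/my-bucket', 'kitten.jpg')
'/b/my-bucket/o/kitten.jpg'
>>> Blob.path_helper('/b/my-bucket', 'photos/kitten.jpg')
'/b/my-bucket/o/photos%2Fkitten.jpg'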

@property
def acl(self):
"""Create our ACL on demand."""
@@ -109,7 +124,7 @@ def path(self):
if not self.name:
raise ValueError('Cannot determine path without a blob name.')

return self.bucket.path + '/o/' + quote(self.name, safe='')
return self.path_helper(self.bucket.path, self.name)

@property
def public_url(self):
@@ -179,7 +194,7 @@ def rename(self, new_name):
:returns: The newly-copied blob.
"""
new_blob = self.bucket.copy_blob(self, self.bucket, new_name)
self.bucket.delete_blob(self)
self.delete()
return new_blob

def delete(self):
@@ -191,7 +206,7 @@ def delete(self):
(propagated from
:meth:`gcloud.storage.bucket.Bucket.delete_blob`).
"""
return self.bucket.delete_blob(self)
return self.bucket.delete_blob(self.name)

def download_to_file(self, file_obj):
"""Download the contents of this blob into a file-like object.
101 changes: 37 additions & 64 deletions gcloud/storage/bucket.py
@@ -163,7 +163,7 @@ def path(self):

return self.path_helper(self.name)

def get_blob(self, blob):
def get_blob(self, blob_name):
"""Get a blob object by name.

This will return None if the blob doesn't exist::
@@ -176,15 +176,13 @@ def get_blob(self, blob):
>>> print bucket.get_blob('/does-not-exist.txt')
None

:type blob: string or :class:`gcloud.storage.blob.Blob`
:param blob: The name of the blob to retrieve.
:type blob_name: string
:param blob_name: The name of the blob to retrieve.

:rtype: :class:`gcloud.storage.blob.Blob` or None
:returns: The blob object if it exists, otherwise None.
"""
# Coerce this -- either from a Blob or a string.
blob = self.new_blob(blob)

blob = Blob(bucket=self, name=blob_name)
try:
response = self.connection.api_request(method='GET',
path=blob.path)
@@ -243,27 +241,6 @@ def iterator(self, prefix=None, delimiter=None, max_results=None,

return self._iterator_class(self, extra_params=extra_params)

def new_blob(self, blob):
"""Given path name (or Blob), return a :class:`Blob` object.

This is really useful when you're not sure if you have a ``Blob``
instance or a string path name. Given either of those types, this
returns the corresponding ``Blob``.

:type blob: string or :class:`gcloud.storage.blob.Blob`
:param blob: A path name or actual blob object.

:rtype: :class:`gcloud.storage.blob.Blob`
:returns: A Blob object with the path provided.
"""
if isinstance(blob, Blob):
return blob

if isinstance(blob, six.string_types):
return Blob(bucket=self, name=blob)

raise TypeError('Invalid blob: %s' % blob)

def delete(self, force=False):
"""Delete this bucket.

@@ -303,10 +280,10 @@ def delete(self, force=False):

self.connection.api_request(method='DELETE', path=self.path)

def delete_blob(self, blob):
def delete_blob(self, blob_name):
"""Deletes a blob from the current bucket.

If the blob isn't found, raise a
If the blob isn't found (backend 404), raises a
:class:`gcloud.exceptions.NotFound`.

For example::
@@ -323,21 +300,17 @@ def delete_blob(self, blob):
... except NotFound:
... pass

:type blob_name: string
:param blob_name: A blob name to delete.

:type blob: string or :class:`gcloud.storage.blob.Blob`
:param blob: A blob name or Blob object to delete.

:rtype: :class:`gcloud.storage.blob.Blob`
:returns: The blob that was just deleted.
:raises: :class:`gcloud.exceptions.NotFound` (to suppress
the exception, call ``delete_blobs``, passing a no-op
``on_error`` callback, e.g.::

>>> bucket.delete_blobs([blob], on_error=lambda blob: None)
"""
blob = self.new_blob(blob)
self.connection.api_request(method='DELETE', path=blob.path)
return blob
blob_path = Blob.path_helper(self.path, blob_name)
self.connection.api_request(method='DELETE', path=blob_path)
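A hedged sketch of the reworked call, since ``delete_blob`` now takes the blob *name* and returns ``None`` (see the updated ``test_delete_blob_hit`` further down); bucket and object names are hypothetical, with defaults set as in ``docs/index.rst``::

>>> from gcloud import storage
>>> from gcloud.exceptions import NotFound
>>> storage.set_defaults()
>>> bucket = storage.get_bucket('my-example-bucket')
>>> try:
...     result = bucket.delete_blob('my-file.txt')
...     assert result is None
... except NotFound:
...     pass   # the object was not there to begin with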

def delete_blobs(self, blobs, on_error=None):
"""Deletes a list of blobs from the current bucket.
@@ -357,7 +330,10 @@ def delete_blobs(self, blobs, on_error=None):
"""
for blob in blobs:
try:
self.delete_blob(blob)
blob_name = blob
if not isinstance(blob_name, six.string_types):
blob_name = blob.name
self.delete_blob(blob_name)
except NotFound:
if on_error is not None:
on_error(blob)
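For callers, the normalization above means ``delete_blobs`` keeps accepting a mix of ``Blob`` objects and plain names; a no-op ``on_error`` suppresses ``NotFound``. A sketch with hypothetical names::

>>> from gcloud import storage
>>> storage.set_defaults()
>>> bucket = storage.get_bucket('my-example-bucket')
>>> doomed = [storage.Blob('old-report.txt', bucket=bucket),   # Blob object
...           'scratch.txt',                                    # plain name
...           'probably-missing.txt']                           # may 404
>>> bucket.delete_blobs(doomed, on_error=lambda blob: None)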
@@ -382,12 +358,12 @@ def copy_blob(self, blob, destination_bucket, new_name=None):
"""
if new_name is None:
new_name = blob.name
new_blob = destination_bucket.new_blob(new_name)
new_blob = Blob(bucket=destination_bucket, name=new_name)
api_path = blob.path + '/copyTo' + new_blob.path
self.connection.api_request(method='POST', path=api_path)
return new_blob
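For reference, a hedged usage sketch of ``copy_blob`` with the constructor-based ``Blob`` (both bucket names are hypothetical)::

>>> from gcloud import storage
>>> storage.set_defaults()
>>> source = storage.get_bucket('my-source-bucket')
>>> backups = storage.get_bucket('my-backup-bucket')
>>> blob = storage.Blob('report.txt', bucket=source)
>>> print source.copy_blob(blob, backups, new_name='report-copy.txt')
<Blob: my-backup-bucket, report-copy.txt>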

def upload_file(self, filename, blob=None):
def upload_file(self, filename, blob_name=None):
"""Shortcut method to upload a file into this bucket.

Use this method to quickly put a local file in Cloud Storage.
@@ -401,9 +377,8 @@ def upload_file(self, filename, blob=None):
>>> print bucket.get_all_blobs()
[<Blob: my-bucket, remote-text-file.txt>]

If you don't provide a blob value, we will try to upload the file
using the local filename as the blob (**not** the complete
path)::
If you don't provide a blob name, we will try to upload the file
using the local filename (**not** the complete path)::

>>> from gcloud import storage
>>> connection = storage.get_connection(project)
@@ -415,22 +390,22 @@ def upload_file(self, filename, blob=None):
:type filename: string
:param filename: Local path to the file you want to upload.

:type blob: string or :class:`gcloud.storage.blob.Blob`
:param blob: The blob (either an object or a remote path) of where
to put the file. If this is blank, we will try to
upload the file to the root of the bucket with the
same name as on your local file system.
:type blob_name: string
:param blob_name: The name of the blob to upload the file to. If this
is blank, we will try to upload the file to the root
of the bucket with the same name as on your local
file system.

:rtype: :class:`Blob`
:returns: The updated Blob object.
"""
if blob is None:
blob = os.path.basename(filename)
blob = self.new_blob(blob)
if blob_name is None:
blob_name = os.path.basename(filename)
blob = Blob(bucket=self, name=blob_name)
blob.upload_from_filename(filename)
return blob

def upload_file_object(self, file_obj, blob=None):
def upload_file_object(self, file_obj, blob_name=None):
"""Shortcut method to upload a file object into this bucket.

Use this method to quickly put a local file in Cloud Storage.
@@ -444,9 +419,8 @@ def upload_file_object(self, file_obj, blob=None):
>>> print bucket.get_all_blobs()
[<Blob: my-bucket, remote-text-file.txt>]

If you don't provide a blob value, we will try to upload the file
using the local filename as the blob (**not** the complete
path)::
If you don't provide a blob name, we will try to upload the file
using the local filename (**not** the complete path)::

>>> from gcloud import storage
>>> connection = storage.get_connection(project)
@@ -458,19 +432,18 @@ def upload_file_object(self, file_obj, blob=None):
:type file_obj: file
:param file_obj: A file handle open for reading.

:type blob: string or :class:`gcloud.storage.blob.Blob`
:param blob: The blob (either an object or a remote path) of where
to put the file. If this is blank, we will try to
upload the file to the root of the bucket with the
same name as on your local file system.
:type blob_name: string
:param blob_name: The name of the blob to upload the file to. If this
is blank, we will try to upload the file to the root
of the bucket with the same name as on your local
file system.

:rtype: :class:`Blob`
:returns: The updated Blob object.
"""
if blob:
blob = self.new_blob(blob)
else:
blob = self.new_blob(os.path.basename(file_obj.name))
if blob_name is None:
blob_name = os.path.basename(file_obj.name)
blob = Blob(bucket=self, name=blob_name)
blob.upload_from_file(file_obj)
return blob
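A short sketch of the renamed ``blob_name`` keyword on the two upload shortcuts (paths and names are hypothetical); when it is omitted, the local basename is used::

>>> from gcloud import storage
>>> storage.set_defaults()
>>> bucket = storage.get_bucket('my-example-bucket')
>>> blob = bucket.upload_file('/tmp/local-file.txt', blob_name='remote-text-file.txt')
>>> file_obj = open('/tmp/notes.txt', 'rb')
>>> blob = bucket.upload_file_object(file_obj)   # uploaded as 'notes.txt'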

2 changes: 1 addition & 1 deletion gcloud/storage/demo/demo.py
@@ -24,7 +24,7 @@
print(list(storage.get_all_buckets(connection)))

# How about we create a new blob inside this bucket.
blob = bucket.new_blob("my-new-file.txt")
blob = storage.Blob("my-new-file.txt", bucket=bucket)

# Now let's put some data in there.
blob.upload_from_string("this is some data!")
10 changes: 5 additions & 5 deletions gcloud/storage/test_blob.py
@@ -1031,17 +1031,17 @@ def __init__(self, connection):
self._blobs = {}
self._deleted = []

def get_blob(self, blob):
return self._blobs.get(blob)
def get_blob(self, blob_name):
return self._blobs.get(blob_name)

def copy_blob(self, blob, destination_bucket, new_name):
destination_bucket._blobs[new_name] = self._blobs[blob.name]
return blob.__class__(None, bucket=destination_bucket,
properties={'name': new_name})

def delete_blob(self, blob):
del self._blobs[blob.name]
self._deleted.append(blob.name)
def delete_blob(self, blob_name):
del self._blobs[blob_name]
self._deleted.append(blob_name)


class _Signer(object):
24 changes: 2 additions & 22 deletions gcloud/storage/test_bucket.py
@@ -306,25 +306,6 @@ def test_iterator_explicit(self):
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], EXPECTED)

def test_new_blob_existing(self):
from gcloud.storage.blob import Blob
bucket = self._makeOne()
existing = Blob(None, bucket=bucket)
self.assertTrue(bucket.new_blob(existing) is existing)

def test_new_blob_str(self):
from gcloud.storage.blob import Blob
BLOB_NAME = 'blob-name'
bucket = self._makeOne()
blob = bucket.new_blob(BLOB_NAME)
self.assertTrue(isinstance(blob, Blob))
self.assertTrue(blob.bucket is bucket)
self.assertEqual(blob.name, BLOB_NAME)

def test_new_blob_invalid(self):
bucket = self._makeOne()
self.assertRaises(TypeError, bucket.new_blob, object())

def test_delete_default_miss(self):
from gcloud.exceptions import NotFound
NAME = 'name'
@@ -410,9 +391,8 @@ def test_delete_blob_hit(self):
BLOB_NAME = 'blob-name'
connection = _Connection({})
bucket = self._makeOne(connection, NAME)
blob = bucket.delete_blob(BLOB_NAME)
self.assertTrue(blob.bucket is bucket)
self.assertEqual(blob.name, BLOB_NAME)
result = bucket.delete_blob(BLOB_NAME)
self.assertTrue(result is None)
kw, = connection._requested
self.assertEqual(kw['method'], 'DELETE')
self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))