App Engine: update Cloud Storage using the SDK and GAE production

This code shows how to read and write blobs and how to create a serving URL.

The blobs can be images or other files like CSS, JS and PDFs. We use the default bucket in Google Cloud Storage (GCS) to store the blobs.
From the docs: "An application can use the default GCS bucket, which provides an already configured bucket with free quota."
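
For reference, the default bucket name can be looked up at runtime; this is a minimal sketch using the same app_identity call that gcs_data.py below relies on:

from google.appengine.api import app_identity

# e.g. 'your-app-id.appspot.com'
default_bucket = app_identity.get_default_gcs_bucket_name()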

This code still needs blobstore.create_gs_key() to create the key for images.get_serving_url().
We use an image serving URL for dynamic resizing and cropping.
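
For an image object the pattern looks like this (a minimal sketch of what gcs_data.py below does; the bucket and file name are placeholders):

from google.appengine.ext import blobstore
from google.appengine.api import images

# the GCS object path must be prefixed with /gs for create_gs_key
gs_key = blobstore.create_gs_key('/gs/your_default_bucket/picture.png')
serving_url = images.get_serving_url(gs_key, secure_url=True)
# append for example =s200 (resize) or =s200-c (crop) to the serving url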

gcs_upload.py contains the code to upload a file to Cloud Storage, using a static HTML page for the form:

upload: https://appid.appspot.com/static/gcs_upload.html

The upload limit is 32 megabytes, because the example code does not use the blobstore create_upload_url callback.
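
If you need larger uploads, you would post the form to a URL created by blobstore.create_upload_url instead, so the file goes straight to GCS. A rough sketch, not part of this gist, with hypothetical routes /upload_form and /upload_done:

from google.appengine.api import app_identity
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers
import webapp2


class UploadForm(webapp2.RequestHandler):
    """ serve a form that posts directly to the blobstore/GCS upload service """

    def get(self):
        upload_url = blobstore.create_upload_url(
            '/upload_done', gs_bucket_name=app_identity.get_default_gcs_bucket_name())
        self.response.write(
            '<form action="%s" enctype="multipart/form-data" method="post">'
            '<input type="file" name="file"><input type="submit" value="Upload">'
            '</form>' % upload_url)


class UploadDone(blobstore_handlers.BlobstoreUploadHandler):
    """ callback after the upload; the file is already stored in the default bucket """

    def post(self):
        file_info = self.get_file_infos('file')[0]
        # file_info.gs_object_name is the full GCS object name, e.g. /gs/bucket/object
        self.response.write(file_info.gs_object_name)


app = webapp2.WSGIApplication([
    webapp2.Route(r'/upload_form', handler=UploadForm),
    webapp2.Route(r'/upload_done', handler=UploadDone),
], debug=True)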

To serve the data, you can use the following in your Jinja HTML template (a handler sketch follows the snippets):

js:  <script type="text/javascript" src="{{ serving_url }}"></script>
css: <link type="text/css" rel="stylesheet" href="{{ serving_url }}">
pdf: <a href="{{ serving_url }}" target="_blank">Test PDF</a>
img: <img  alt="{{ filename }}" src="{{ serving_url }}" />
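
A minimal sketch of a handler that passes serving_url to such a template; it assumes jinja2 is added to the libraries in app.yaml and that a hypothetical templates/dynamic.html contains the snippets above:

import jinja2
import webapp2
import gcs_data

jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader('templates'))


class DynamicPage(webapp2.RequestHandler):

    def get(self, file_name):
        # the entity id is the file name, see gcs_upload.py below
        dyn = gcs_data.Dynamics.get_by_id(file_name)
        template = jinja_env.get_template('dynamic.html')
        self.response.write(template.render(
            filename=dyn.filename, serving_url=dyn.serving_url))


app = webapp2.WSGIApplication(
    [webapp2.Route(r'/dynamic/<file_name>', handler=DynamicPage)], debug=True)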

In GAE production, the serving URL looks like:

images: https://lhN.ggpht.com/NlCARAtN.........3NQW9ZxYpms=s698
other:  https://storage.googleapis.com/default_bucket/file_name

And in the SDK:

images: http://localhost:8080/_ah/img/encoded_gs_file:YXBwX2R......Y3Nz
other:  http://localhost:8080/_ah/gcs/default_bucket/file_name

The SDK encoded_gs_file id is base64.urlsafe_b64encode('app_default_bucket/filename').
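
For example, with a hypothetical file name:

import base64

encoded = base64.urlsafe_b64encode(b'app_default_bucket/test.css')
# 'encoded_gs_file:' + encoded is the id the SDK uses in the /_ah/img/ url
original = base64.urlsafe_b64decode(encoded)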

This code was tested using App Engine 1.9.6 and the GCS client library.

To install the client library on Windows 7 in my project, I used:

C:\Python27\scripts>pip install setuptools --no-use-wheel --upgrade
C:\Python27\scripts>pip install GoogleAppEngineCloudStorageClient -t <my_app_directory_root>
app.yaml:

application: gcstest
version: 1
runtime: python27
api_version: 1
threadsafe: yes

handlers:
# this folder contains gcs_upload.html
- url: /static
  static_dir: static

- url: /gcs_upload
  script: gcs_upload.app

libraries:
- name: webapp2
  version: latest
gcs_data.py:

#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, with_statement
from google.appengine.ext import blobstore
from google.appengine.api import app_identity, images
# to use cloudstorage install the GCS client in your project
import cloudstorage as gcs
from google.appengine.ext import ndb
import os
import mimetypes
# bonus, zip Dynamics entities and binary GCS blobs
import zipfile
import logging


class Dynamics(ndb.Model):
    filename = ndb.StringProperty()
    extension = ndb.ComputedProperty(lambda self: self.filename.rsplit('.', 1)[1].lower())
    serving_url = ndb.StringProperty(default=None)


default_bucket = app_identity.get_default_gcs_bucket_name()
gae_development = os.environ['SERVER_SOFTWARE'].startswith('Development')


def gcs_serving_url(dyn):
    """ serving url for google cloud storage dyn entity """

    gcs_file_name = '/%s/%s' % (default_bucket, dyn.filename)
    if dyn.extension in ['png', 'jpg', 'gif']:
        dyn.serving_url = images.get_serving_url(
            blobstore.create_gs_key('/gs' + gcs_file_name), secure_url=True)
    elif gae_development:
        # this SDK feature has not been documented yet !!!
        dyn.serving_url = 'http://localhost:8080/_ah/gcs' + gcs_file_name
    else:
        dyn.serving_url = 'https://storage.googleapis.com' + gcs_file_name
    return dyn.serving_url


def gcs_read_blob(dyn):
    """ read binary blob from google cloud storage """

    gcs_file_name = '/%s/%s' % (default_bucket, dyn.filename)
    try:
        with gcs.open(gcs_file_name) as f:
            return f.read()
    except gcs.NotFoundError as e:
        logging.warning('GCS file %s NOT FOUND : %s' % (gcs_file_name, e))
        return None


def gcs_write_blob(dyn, blob):
    """ update google cloud storage dyn entity """

    gcs_file_name = '/%s/%s' % (default_bucket, dyn.filename)
    content_type = mimetypes.guess_type(dyn.filename)[0]
    if dyn.extension in ['js', 'css']:
        content_type += b'; charset=utf-8'
    with gcs.open(gcs_file_name, 'w', content_type=content_type,
                  options={b'x-goog-acl': b'public-read'}) as f:
        f.write(blob)
    return gcs_file_name


def gcs_content_type(dyn):

    gcs_file_name = '/%s/%s' % (default_bucket, dyn.filename)
    return gcs.stat(gcs_file_name).content_type


def gcs_zip_dynamics():
    """ bonus: save Dynamics and GCS blobs in a zip archive """

    gcs_file_name = '/%s/dynamics.zip' % default_bucket
    with gcs.open(gcs_file_name, 'w', content_type=b'multipart/x-zip') as f:
        with zipfile.ZipFile(f, 'w') as z:
            for each in Dynamics.query():
                member_dir = each.filename.replace('.', '_').encode('utf-8')
                z.writestr(b'%s/safe_key.txt' % member_dir, each.key.urlsafe().encode('utf-8'))
                z.writestr(b'%s/serving_url.txt' % member_dir, each.serving_url.encode('utf-8'))
                # if we have a GCS blob for this entity, save it in this member
                blob = gcs_read_blob(each)
                if blob:
                    z.writestr(b'%s/%s' % (member_dir, each.filename), blob)
                    z.writestr(b'%s/content_type.txt' % member_dir, gcs_content_type(each))


# example: create a serving url
entity = Dynamics(id='test.pdf', filename='test.pdf')
gcs_serving_url(entity)
entity.put()
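
The bonus gcs_zip_dynamics() is not mapped to a route in this gist; a hypothetical handler to trigger it could look like this:

import gcs_data
import webapp2


class ZipDynamics(webapp2.RequestHandler):
    """ write dynamics.zip with all entities and blobs to the default bucket """

    def get(self):
        gcs_data.gcs_zip_dynamics()
        self.response.write('dynamics.zip written to the default GCS bucket')


app = webapp2.WSGIApplication(
    [webapp2.Route(r'/gcs_zip', handler=ZipDynamics)], debug=True)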
gcs_upload.html:

<!DOCTYPE HTML>
<html lang="en">
<head>
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
    <title>GCS Upload</title>
</head>
<body>
    <form action="/gcs_upload" enctype="multipart/form-data" method="post">
        <div><input type="file" name="file"/></div>
        <div><input type="submit" value="Upload"></div>
    </form>
</body>
</html>
gcs_upload.py:

#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, with_statement
import cgi
import gcs_data
import webapp2
import logging


class GcsUpload(webapp2.RequestHandler):
    """ upload to cloudstorage and save serving_url in dyn
        We use upload and write to make the cloudstorage file public
        and to set the Content-Type with charset utf-8 for text files """

    def post(self):

        field_storage = self.request.POST.get("file", None)
        if isinstance(field_storage, cgi.FieldStorage):
            file_name = field_storage.filename
            dyn = gcs_data.Dynamics(id=file_name, filename=file_name)
            gcs_file_name = gcs_data.gcs_write_blob(dyn, field_storage.file.read())
            gcs_data.gcs_serving_url(dyn)
            dyn.put()
            logging.info('Uploaded and saved in default GCS bucket : ' + gcs_file_name)

            # echo the uploaded file back to the client
            self.response.headers[b'Content-Type'] = gcs_data.gcs_content_type(dyn)
            self.response.write(gcs_data.gcs_read_blob(dyn))
        else:
            logging.error('GCS Upload failed')


routes = [webapp2.Route(r'/gcs_upload', handler=GcsUpload)]
app = webapp2.WSGIApplication(routes=routes, debug=True)