The Google GCloud example

This notebook illustrates using the Google Cloud Datastore (via the `gcloud` library) for section 3.4. Note that authentication has already been established when we installed the Google Cloud SDK.

In [1]:
from gcloud import storage
In [2]:
client = storage.Client()
In [3]:
from gcloud import datastore
clientds = datastore.Client()
In [4]:
import csv
In [10]:
bucket = client.bucket('book-datacont')
key = clientds.key('book-table')
In [13]:
with open('path to your csv file\experiments.csv', 'rb') as csvfile:
    csvf = csv.reader(csvfile, delimiter=',', quotechar='|')
    for item in csvf:
        print item
        blob = bucket.blob(item[3])
        data = open("path-to-your-datafiles\datafiles\\"+item[3], 'rb')
        blob.upload_from_file(data)
        blob.make_public()
        url = "https://storage.googleapis.com/book-datacont/"+item[3]
        entity = datastore.Entity(key=key)
        entity['experiment-name'] = item[0]
        entity['experiment-id'] = item[1]
        entity['date'] = item[2]
        entity['description'] = item[4]
        entity['url'] = url
        clientds.put(entity)
['experiment1', '1', '3/15/2002', 'exp1', 'this is the comment']
['experiment1', '2', '3/15/2002', 'exp2', 'this is the comment2']
['experiment2', '3', '3/16/2002', 'exp3', 'this is the comment3']
['experiment3', '4', '3/16/2002', 'exp4', 'this is the comment233']
In [138]:
query = clientds.query(kind=u'book-table')
In [139]:
query.add_filter(u'experiment-name', '=', 'experiment1')
In [141]:
results = list(query.fetch())
In [145]:
# Collect the public download URL stored on each matching entity.
urls = []
for entity in results:
    urls.append(entity['url'])
In [146]:
urls
Out[146]:
['https://storage.googleapis.com/book-datacont/exp1',
 'https://storage.googleapis.com/book-datacont/exp2']
In [ ]: