Skip to content

Instantly share code, notes, and snippets.

@ziyadm
Created October 25, 2012 00:21
Show Gist options
  • Save ziyadm/3949774 to your computer and use it in GitHub Desktop.
Google Compute Engine Integration into Apache Libcloud
diff --git a/demos/compute_demo.py b/demos/compute_demo.py
index e9f120f..37ad482 100644
--- a/demos/compute_demo.py
+++ b/demos/compute_demo.py
@@ -36,7 +36,7 @@ sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__),
from libcloud.common.types import InvalidCredsError
from libcloud.compute.types import Provider
-from libcloud.providers import get_driver
+from libcloud.compute.providers import get_driver
from pprint import pprint
diff --git a/demos/google_compute_engine/demo.py b/demos/google_compute_engine/demo.py
new file mode 100644
index 0000000..f32af5e
--- /dev/null
+++ b/demos/google_compute_engine/demo.py
@@ -0,0 +1,119 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import libcloud.test.secrets as secrets
+
+from libcloud.compute.types import Provider
+from libcloud.compute.providers import get_driver
+
+from pprint import pprint
+
+# Set up your compute driver
+GoogleComputeEngine = get_driver(Provider.GCE)
+
+# Instantiate your compute driver with the required credentials. For Google
+# Compute Engine, these are (ssh_username, ssh_private_key_file, project).
+ssh_username, ssh_private_key_file, project = getattr(secrets,
+ 'GCE_PARAMS',
+ ())
+driver = GoogleComputeEngine(ssh_username, ssh_private_key_file, project)
+
+# Get the list of available images.
+images = driver.list_images()
+print "List of images:"
+pprint(images)
+print "\n"
+
+# Get the list of available sizes (machine types).
+sizes = driver.list_sizes()
+print "List of sizes (machine types):"
+pprint(sizes)
+print "\n"
+
+# Get the list of available locations.
+locations = driver.list_locations()
+print "List of locations:"
+pprint(locations)
+print "\n"
+
+# Create a new node, 'new_node_name', using a machine type, image, and location
+# from the list of available machine types, images, and locations.
+image = images[-1]
+size = sizes[0]
+location = locations[0]
+new_node_name = 'my-new-node'
+node = driver.create_node(new_node_name, size, image, location)
+print "Creating a new node:", node.name
+pprint(node)
+print "\n"
+
+# Print metadata for node. This will contain a script if bootstrapping node
+# with a startup script.
+print "Metadata for:", node.name
+pprint(node.extra['metadata'])
+print "\n"
+
+# Get the list of nodes in your cluster.
+nodes = driver.list_nodes()
+print "List of nodes:"
+pprint(nodes)
+print "\n"
+
+# To see the following command take effect, ssh into 'new_node_name'.
+# Restarting 'new_node_name'.
+ret = driver.reboot_node(node)
+if ret:
+ print "Successfully rebooted:", node.name
+else:
+ print "Unsuccessful in rebooting:", node.name
+pprint(node)
+print "\n"
+
+# To see the following command take effect, ssh into 'new_node_name'.
+# Deleting 'new_node_name'.
+ret = driver.destroy_node(node)
+if ret:
+ print "Successfully deleted:", node.name
+else:
+ print "Unsuccessful in deleting:", node.name
+pprint(node)
+print "\n"
+
+# Create a new node, 'new_node_name', using a machine type, image, and location
+# from the list of available machine types, images, and locations.
+# The node will be bootstrapped by running a desired script on first
+# initialization.
+script = '' # Full path to your bootstrap script.
+node = driver.deploy_node(node.name, size, image, location, script)
+print "Creating a new node:", node.name, " and deploying it with script \
+from", script
+pprint(node)
+print "\n"
+
+# Print metadata for node.
+print "Metadata for:", node.name
+pprint(node.extra['metadata'])
+print "\n"
+
+# Delete all nodes in cluster.
+print "Deleting all nodes in cluster.\n"
+for node in driver.list_nodes():
+ node.destroy()
+
+# Get the list of nodes in your cluster. This should return an empty list.
+nodes = driver.list_nodes()
+print "List of nodes:"
+pprint(nodes)
+print "\n"
diff --git a/libcloud/compute/drivers/__init__.py b/libcloud/compute/drivers/__init__.py
index 6b71f02..9d8f291 100644
--- a/libcloud/compute/drivers/__init__.py
+++ b/libcloud/compute/drivers/__init__.py
@@ -37,4 +37,5 @@ __all__ = [
'vcloud',
'voxel',
'vpsnet',
+ 'gce',
]
diff --git a/libcloud/compute/drivers/gce.py b/libcloud/compute/drivers/gce.py
new file mode 100644
index 0000000..644d20e
--- /dev/null
+++ b/libcloud/compute/drivers/gce.py
@@ -0,0 +1,392 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Libcloud driver for Google Compute Engine.
+
+Google Compute Engine home page:
+cloud.google.com/products/compute-engine.html
+
+Google Compute Engine documentation:
+developers.google.com/compute/docs
+"""
+
+import getpass
+import os
+import paramiko
+import sys
+
+from libcloud.common.gcelib import gce, gce_util, shortcuts
+from libcloud.compute.base import Node, NodeDriver, NodeImage, NodeLocation
+from libcloud.compute.base import NodeSize
+from libcloud.compute.providers import Provider
+from libcloud.compute.types import NodeState
+
+
class GoogleComputeEngineNodeDriver(NodeDriver):
    """
    Google Compute Engine Node Driver.

    Wraps the gcelib client library and maps Compute Engine instances,
    images, zones, and machine types onto the standard libcloud Node,
    NodeImage, NodeLocation, and NodeSize objects.
    """
    api_name = 'gce'
    type = Provider.GCE
    name = 'GoogleComputeEngine'

    # Compute Engine instance statuses mapped onto libcloud node states.
    NODE_STATE_MAP = {
        "PROVISIONING": NodeState.PENDING,
        "STAGING": NodeState.PENDING,
        "RUNNING": NodeState.RUNNING,
        "STOPPED": NodeState.TERMINATED,
        "TERMINATED": NodeState.TERMINATED
    }

    def __init__(self, ssh_username=None, ssh_private_key_file=None,
                 project=None, key=None):
        """
        @param ssh_username: The username that can be used to log into
                             Google Compute Engine nodes in a cluster
                             (required).
        @type ssh_username: C{str}

        @param ssh_private_key_file: The fully qualified path to the ssh
                                     private key file (required).
        @type ssh_private_key_file: C{str}

        @param project: The name of the Google Compute Engine project
                        (required).
        @type project: C{str}

        @raise ValueError: If any required parameter is missing.

        @rtype: None
        """
        super(GoogleComputeEngineNodeDriver, self).__init__(key)
        self.credentials = gce_util.get_credentials()

        # A library must not print and kill the calling process on bad
        # input (the original did print + sys.exit(1)); raise instead so
        # callers can handle missing configuration themselves.
        if not project:
            raise ValueError("Please specify the project in your Driver's "
                             "constructor.")
        if not ssh_username:
            raise ValueError("Please specify your ssh username in your "
                             "Driver's constructor.")
        if not ssh_private_key_file:
            raise ValueError("Please specify your ssh private key file in "
                             "your Driver's constructor.")

        self.project = project
        self.ssh_username = ssh_username
        self.ssh_private_key_file = ssh_private_key_file

        self.defaultZone = 'us-central1-a'
        self.defaultImage = 'projects/google/images/ubuntu-12-04-v20120621'
        self.defaultMachineType = 'n1-standard-1'

        self.SSHClient = paramiko.SSHClient()
        self.gcelib_instance = gce.get_api(self.credentials,
                                           default_project=self.project,
                                           default_zone=self.defaultZone,
                                           default_image=self.defaultImage,
                                           default_machine_type=
                                           self.defaultMachineType)

    def list_nodes(self):
        """
        List all Google Compute Engine nodes associated with the current
        project.

        @rtype: C{list} of L{Node}
        """
        return [self._to_node(instance)
                for instance in self.gcelib_instance.all_instances()]

    def list_images(self):
        """
        List all available Google Compute Engine distribution images.

        @rtype: C{list} of L{NodeImage}
        """
        list_images = [self._to_node_image(img) for img in
                       self.gcelib_instance.list_images(project='google')]

        # TODO(zmir): This call currently returns an empty list. Dig into
        # this to understand why/how to rectify.
        #for img in self.gcelib_instance.list_images(project=self.project):
        #    list_images.append(self._to_node_image(img))

        return list_images

    def list_sizes(self, location=None):
        """
        List all available Google Compute Engine node sizes (machine types).

        @keyword location: The location at which to list sizes (optional,
                           currently unused).
        @type location: L{NodeLocation}

        @rtype: C{list} of L{NodeSize}
        """
        # TODO(zmir): Determine whether the availableZone field has been
        # implemented on the back-end, and subsequently, whether it has
        # been propagated to gcelib.
        return [self._to_node_size(machine_type) for machine_type in
                self.gcelib_instance.list_machine_types()]

    def list_locations(self):
        """
        List all available Google Compute Engine zones.

        @rtype: C{list} of L{NodeLocation}
        """
        return [self._to_node_location(zone)
                for zone in self.gcelib_instance.list_zones()]

    def create_node(self, name, size, image, location):
        """
        Create a new Google Compute Engine node.

        @param name: The name of the new Google Compute Engine node
                     (required).
        @type name: C{str}

        @param size: The size of resources allocated to this node
                     (required).
        @type size: L{NodeSize}

        @param image: The OS Image to boot on this node (required).
        @type image: L{NodeImage}

        @param location: The zone in which to create this node (required).
        @type location: L{NodeLocation}

        @rtype: L{Node}
        """
        self.gcelib_instance.insert_instance(name=name, machineType=size.name,
                                             image=image.name,
                                             zone=location.name,
                                             project=self.project,
                                             metadata=None)

        return self._get_node(name)

    def reboot_node(self, node):
        """
        Reboot the given Google Compute Engine node by sshing into it and
        issuing 'sudo reboot'.

        @param node: The Google Compute Engine node to reboot (required).
        @type node: L{Node}

        @rtype: C{bool}
        """
        # ssh from outside the cluster must target the externally visible
        # NAT address, i.e. the node's public IP (see _to_node).
        ssh_host = node.public_ips[0]
        ssh_private_key_file = os.path.expanduser(self.ssh_private_key_file)
        ssh_private_key_pass = ""

        try:
            pkey = paramiko.RSAKey.from_private_key_file(
                ssh_private_key_file, ssh_private_key_pass)
        except paramiko.SSHException:
            # The key is passphrase protected; prompt for the passphrase
            # and retry.
            prompt = 'Enter passphrase for key \'' + ssh_private_key_file + \
                     '\': '
            ssh_private_key_pass = getpass.getpass(prompt=prompt)
            pkey = paramiko.RSAKey.from_private_key_file(
                ssh_private_key_file, ssh_private_key_pass)

        try:
            ssh_client = self.SSHClient
            ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh_client.connect(ssh_host, username=self.ssh_username,
                               pkey=pkey)
            ssh_client.exec_command('sudo reboot')
            ssh_client.close()
            return True
        except Exception:
            # Best-effort semantics: any ssh failure is reported as an
            # unsuccessful reboot rather than propagated.
            return False

    def destroy_node(self, node):
        """
        Destroy the given Google Compute Engine node.

        @param node: The Google Compute Engine node to destroy (required).
        @type node: L{Node}

        @rtype: C{bool}
        """
        try:
            self.gcelib_instance.delete_instance(node.name)
            return True
        except Exception:
            # Best-effort semantics: report failure instead of propagating.
            return False

    def deploy_node(self, name, size, image, location, script):
        """
        Create a new Google Compute Engine node, and run a startup script
        on initialization.

        @param name: The name of the new Google Compute Engine node
                     (required).
        @type name: C{str}

        @param size: The size of resources allocated to this node
                     (required).
        @type size: L{NodeSize}

        @param image: The OS Image to boot on this node (required).
        @type image: L{NodeImage}

        @param location: The zone in which to create this node (required).
        @type location: L{NodeLocation}

        @param script: The fully qualified local path to the startup
                       script to run on node initialization (required).
        @type script: C{str}

        @rtype: L{Node}
        """
        # Read the startup script and close the file handle promptly (the
        # original open() call leaked the handle until garbage collection).
        with open(script) as script_file:
            startup_script = shortcuts.metadata(
                {'startup-script': script_file.read()})

        self.gcelib_instance.insert_instance(name=name, machineType=size.name,
                                             image=image.name,
                                             zone=location.name,
                                             project=self.project,
                                             metadata=startup_script)

        return self._get_node(name)

    def _get_node(self, name):
        """
        Get the Google Compute Engine node associated with name.

        @param name: The name of the Google Compute Engine node to be
                     returned (required).
        @type name: C{str}

        @rtype: L{Node}, or C{None} if no such instance exists.
        """
        gcelib_instance = self.gcelib_instance.get_instance(name)
        if gcelib_instance is None:
            return None
        return self._to_node(gcelib_instance)

    def _to_node(self, node):
        """
        Convert the gcelib node into a Node.

        @param node: The gcelib node to be converted into a Node
                     (required).
        @type node: C{gcelib node}

        @rtype: L{Node}
        """
        public_ips = []
        private_ips = []

        extra = {
            'status': node.status,
            'machineType': node.machineType,
            'description': node.description,
            'zone': node.zone,
            'image': node.image,
            'disks': node.disks,
            'networkInterfaces': node.networkInterfaces,
            'id': node.id,
            'selfLink': node.selfLink,
            'name': node.name,
            'metadata': node.metadata,
        }

        # networkIP is the instance's internal (private) address; the
        # externally reachable address is the NAT IP carried on each
        # access config. The original implementation had the two swapped.
        for network_interface in node.networkInterfaces:
            private_ips.append(network_interface.networkIP)
            for access_config in network_interface.accessConfigs:
                public_ips.append(access_config.natIP)

        return Node(id=node.id, name=node.name,
                    state=self.NODE_STATE_MAP[node.status],
                    public_ips=public_ips, private_ips=private_ips,
                    driver=self, size=node.machineType, image=node.image,
                    extra=extra)

    def _to_node_image(self, image):
        """
        Convert the gcelib image into a NodeImage.

        @param image: The gcelib image to be converted into a NodeImage
                      (required).
        @type image: C{gcelib image}

        @rtype: L{NodeImage}
        """
        extra = {
            'preferredKernel': image.preferredKernel,
            'description': image.description,
            'creationTimestamp': image.creationTimestamp,
        }

        return NodeImage(id=image.id, name=image.selfLink, driver=self,
                         extra=extra)

    def _to_node_location(self, location):
        """
        Convert the gcelib location (zone) into a NodeLocation.

        @param location: The gcelib location to be converted into a
                         NodeLocation (required).
        @type location: C{gcelib location}

        @rtype: L{NodeLocation}
        """
        # All Compute Engine zones are US-based at the time of writing.
        return NodeLocation(id=location.id, name=location.name, country='US',
                            driver=self)

    def _to_node_size(self, machine_type):
        """
        Convert the gcelib machine type into a NodeSize.

        @param machine_type: The gcelib machine type to be converted into
                             a NodeSize (required).
        @type machine_type: C{gcelib machine type}

        @rtype: L{NodeSize}
        """
        try:
            price = self._get_size_price(size_id=machine_type.name)
        except KeyError:
            # No pricing data is available for this machine type.
            price = None

        return NodeSize(id=machine_type.id, name=machine_type.name,
                        ram=machine_type.memoryMb,
                        disk=machine_type.imageSpaceGb, bandwidth=0,
                        price=price, driver=self)
diff --git a/libcloud/compute/providers.py b/libcloud/compute/providers.py
index 32b291e..b82cefe 100644
--- a/libcloud/compute/providers.py
+++ b/libcloud/compute/providers.py
@@ -122,7 +122,9 @@ DRIVERS = {
Provider.VCL:
('libcloud.compute.drivers.vcl', 'VCLNodeDriver'),
Provider.KTUCLOUD:
- ('libcloud.compute.drivers.ktucloud', 'KTUCloudNodeDriver')
+ ('libcloud.compute.drivers.ktucloud', 'KTUCloudNodeDriver'),
+ Provider.GCE:
+ ('libcloud.compute.drivers.gce', 'GoogleComputeEngineNodeDriver')
}
diff --git a/libcloud/compute/types.py b/libcloud/compute/types.py
index 80f228a..e2d6b11 100644
--- a/libcloud/compute/types.py
+++ b/libcloud/compute/types.py
@@ -70,6 +70,7 @@ class Provider(object):
@cvar VCL: VCL driver
@cvar KTUCLOUD: kt ucloud driver
@cvar GRIDSPOT: Gridspot driver
+ @cvar GCE: Google Compute Engine driver
"""
DUMMY = 0
EC2 = 1 # deprecated name
@@ -124,6 +125,7 @@ class Provider(object):
RACKSPACE_NOVA_LON = 48
GRIDSPOT = 49
RACKSPACE_NOVA_ORD = 50
+ GCE = 51
class NodeState(object):
diff --git a/libcloud/data/pricing.json b/libcloud/data/pricing.json
index a79f00e..19e20af 100644
--- a/libcloud/data/pricing.json
+++ b/libcloud/data/pricing.json
@@ -204,6 +204,17 @@
"vps_net": {
"1": 0.416
+ },
+
+ "gce": {
+ "n1-standard-1-d": 0.145,
+ "n1-standard-2-d": 0.290,
+ "n1-standard-4-d": 0.580,
+ "n1-standard-8-d": 1.160,
+ "n1-standard-1": 0.145,
+ "n1-standard-2": 0.290,
+ "n1-standard-4": 0.580,
+ "n1-standard-8": 1.160
}
},
diff --git a/libcloud/test/compute/fixtures/gce/deploy_instance.json b/libcloud/test/compute/fixtures/gce/deploy_instance.json
new file mode 100644
index 0000000..a832315
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/deploy_instance.json
@@ -0,0 +1,36 @@
+{
+ "status": "RUNNING",
+ "kind": "compute#instance",
+ "machineType": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-1",
+ "description": "",
+ "zone": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central1-a",
+ "image": "https://www.googleapis.com/compute/v1beta12/projects/google/images/centos-6-2-v20120326",
+ "disks": [{
+ "index": 0,
+ "kind": "compute#attachedDisk",
+ "type": "EPHEMERAL",
+ "mode": "READ_WRITE"
+ }],
+ "name": "foonode2",
+ "networkInterfaces": [{
+ "networkIP": "10.240.64.235",
+ "kind": "compute#networkInterface",
+ "accessConfigs": [{
+ "kind": "compute#accessConfig",
+ "type": "ONE_TO_ONE_NAT",
+ "name": "External NAT",
+ "natIP": "8.35.199.60"
+ }],
+ "network": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/networks/default",
+ "name": "nic0"
+ }],
+ "id": "12990402818933463403",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/instances/foonoden",
+ "metadata": {
+ "items": [{
+ "value": "#! /bin/bash\n# Installs apache and a custom homepage\n\napt-get update\napt-get install -y apache2\ncat <<EOF > /var/www/index.html\n<html><body><h1>Hello World</h1>\n<p>This page was created from a simple start up script!</p>\n</body></html>\nEOF\n",
+ "key": "startup-script"
+ }],
+ "kind": "compute#metadata"
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/deploy_instance_fail.json b/libcloud/test/compute/fixtures/gce/deploy_instance_fail.json
new file mode 100644
index 0000000..7f70fc4
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/deploy_instance_fail.json
@@ -0,0 +1,9 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'projects/googler' was not found"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/insert_instance.json b/libcloud/test/compute/fixtures/gce/insert_instance.json
new file mode 100644
index 0000000..334f2b6
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/insert_instance.json
@@ -0,0 +1,29 @@
+{
+ "status": "RUNNING",
+ "kind": "compute#instance",
+ "machineType": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-1",
+ "description": "",
+ "zone": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central1-a",
+ "image": "https://www.googleapis.com/compute/v1beta12/projects/google/images/centos-6-2-v20120326",
+ "disks": [{
+ "index": 0,
+ "kind": "compute#attachedDisk",
+ "type": "EPHEMERAL",
+ "mode": "READ_WRITE"
+ }],
+ "name": "foonode",
+ "id": "12989505666010310007",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/instances/foonode",
+ "networkInterfaces": [{
+ "networkIP": "10.240.15.80",
+ "kind": "compute#networkInterface",
+ "accessConfigs": [{
+ "kind": "compute#accessConfig",
+ "type": "ONE_TO_ONE_NAT",
+ "name": "External NAT",
+ "natIP": "8.35.199.60"
+ }],
+ "network": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/networks/default",
+ "name": "nic0"
+ }]
+}
diff --git a/libcloud/test/compute/fixtures/gce/insert_instance_fail.json b/libcloud/test/compute/fixtures/gce/insert_instance_fail.json
new file mode 100644
index 0000000..7f70fc4
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/insert_instance_fail.json
@@ -0,0 +1,9 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'projects/googler' was not found"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/install-apache.sh b/libcloud/test/compute/fixtures/gce/install-apache.sh
new file mode 100644
index 0000000..93d70c2
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/install-apache.sh
@@ -0,0 +1,10 @@
+#! /bin/bash
+# Installs apache and a custom homepage
+
+apt-get update
+apt-get install -y apache2
+cat <<EOF > /var/www/index.html
+<html><body><h1>Hello World</h1>
+<p>This page was created from a simple start up script!</p>
+</body></html>
+EOF
diff --git a/libcloud/test/compute/fixtures/gce/list_images.json b/libcloud/test/compute/fixtures/gce/list_images.json
new file mode 100644
index 0000000..99a9a58
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_images.json
@@ -0,0 +1,79 @@
+[{
+ "kind": "compute#image",
+ "name": "centos-6-2-v20120611",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20120611",
+ "description": "CentOS 6.2; Created Mon, 11 Jun 2012 13:15:44 +0000",
+ "creationTimestamp": "2012-06-18T18:05:30.664",
+ "id": "12917726455664967299",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/centos-6-2-v20120611",
+ "sourceType": "RAW"
+}, {
+ "kind": "compute#image",
+ "name": "centos-6-2-v20120621",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20120621",
+ "description": "CentOS 6.2; Created Thu, 21 Jun 2012 14:22:21 +0000",
+ "creationTimestamp": "2012-06-22T05:59:56.392",
+ "id": "12920641029336858796",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/centos-6-2-v20120621",
+ "sourceType": "RAW"
+}, {
+ "kind": "compute#image",
+ "name": "ubuntu-10-04-v20110728",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20110728",
+ "description": "DEPRECATED. Standard minimal Ubuntu 10.04.01 LTS image; Created Thu, 28 Jul 2011 16:45:51 +0000",
+ "creationTimestamp": "2012-07-16T22:18:50.405",
+ "id": "12941198995845323366",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-10-04-v20110728",
+ "sourceType": "RAW"
+}, {
+ "kind": "compute#image",
+ "name": "ubuntu-10-04-v20110929",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20110929",
+ "description": "DEPRECATED. Standard minimal Ubuntu 10.04.01 LTS image; Created Fri, 30 Sep 2011 23:03:27 +0000",
+ "creationTimestamp": "2012-07-16T22:10:06.063",
+ "id": "12941193941298090457",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-10-04-v20110929",
+ "sourceType": "RAW"
+}, {
+ "kind": "compute#image",
+ "name": "ubuntu-10-04-v20111128",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20111123",
+ "description": "DEPRECATED. Standard minimal Ubuntu 10.04.01 LTS image; Created Mon, 28 Nov 2011 22:26:27 +0000",
+ "creationTimestamp": "2012-07-16T22:12:37.523",
+ "id": "12941195401341520479",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-10-04-v20111128",
+ "sourceType": "RAW"
+}, {
+ "kind": "compute#image",
+ "name": "ubuntu-10-04-v20120106",
+ "rawDisk": {
+ "containerType": "TAR",
+ "source": ""
+ },
+ "preferredKernel": "https://www.googleapis.com/compute/v1beta12/projects/google/kernels/gce-20120106",
+ "description": "DEPRECATED. Standard minimal Ubuntu 10.04.01 LTS image; Created Tue, 10 Jan 2012 18:25:24 +0000",
+ "creationTimestamp": "2012-07-16T22:15:18.811",
+ "id": "12941196956151834933",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-10-04-v20120106",
+ "sourceType": "RAW"
+}]
diff --git a/libcloud/test/compute/fixtures/gce/list_images_fail.json b/libcloud/test/compute/fixtures/gce/list_images_fail.json
new file mode 100644
index 0000000..e5fffc9
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_images_fail.json
@@ -0,0 +1,15 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'projects/googler' was not found"
+ }, {
+ "domain": "global",
+ "reason": "invalidParameter",
+ "message": "Invalid unsigned integer value: '-1'.",
+ "locationType": "parameter",
+ "location": "maxResults"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/list_locations.json b/libcloud/test/compute/fixtures/gce/list_locations.json
new file mode 100644
index 0000000..1c72296
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_locations.json
@@ -0,0 +1,29 @@
+[{
+ "status": "UP",
+ "kind": "compute#zone",
+ "description": "us-central1-a",
+ "maintenanceWindows": [{
+ "endTime": "2012-10-28T08:00:00.000",
+ "beginTime": "2012-10-14T08:00:00.000",
+ "name": "2012-10-14-planned-outage",
+ "description": "maintenance zone"
+ }],
+ "creationTimestamp": "2012-05-15T22:15:19.012",
+ "id": "12889558432979476247",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central1-a",
+ "name": "us-central1-a"
+}, {
+ "status": "UP",
+ "kind": "compute#zone",
+ "description": "us-central2-a",
+ "maintenanceWindows": [{
+ "endTime": "2012-12-02T08:00:00.000",
+ "beginTime": "2012-11-11T08:00:00.000",
+ "name": "2012-11-11-planned-outage",
+ "description": "maintenance zone"
+ }],
+ "creationTimestamp": "2012-05-15T22:17:05.592",
+ "id": "12889559460378820818",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central2-a",
+ "name": "us-central2-a"
+}]
diff --git a/libcloud/test/compute/fixtures/gce/list_locations_fail.json b/libcloud/test/compute/fixtures/gce/list_locations_fail.json
new file mode 100644
index 0000000..e5fffc9
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_locations_fail.json
@@ -0,0 +1,15 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'projects/googler' was not found"
+ }, {
+ "domain": "global",
+ "reason": "invalidParameter",
+ "message": "Invalid unsigned integer value: '-1'.",
+ "locationType": "parameter",
+ "location": "maxResults"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/list_machine_types.json b/libcloud/test/compute/fixtures/gce/list_machine_types.json
new file mode 100644
index 0000000..63ce584
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_machine_types.json
@@ -0,0 +1,106 @@
+[{
+ "guestCpus": 2,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "ephemeralDisks": [{
+ "diskGb": 870
+ }],
+ "maximumPersistentDisksSizeGb": "256",
+ "description": "2 vCPUs, 7.5 GB RAM, a 10 GB ephemeral root disk, and an extra 870 GB ephemeral disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-2-d",
+ "memoryMb": 7680,
+ "creationTimestamp": "2012-06-07T20:49:19.448",
+ "id": "12908559582417967837",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-2-d",
+ "hostCpus": 2
+}, {
+ "guestCpus": 4,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "maximumPersistentDisksSizeGb": "512",
+ "description": "4 vCPUs, 15 GB RAM, and a 10 GB ephemeral root disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-4",
+ "memoryMb": 15360,
+ "creationTimestamp": "2012-06-07T20:49:40.050",
+ "id": "12908559692070444049",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-4",
+ "hostCpus": 4
+}, {
+ "guestCpus": 4,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "ephemeralDisks": [{
+ "diskGb": 1770
+ }],
+ "maximumPersistentDisksSizeGb": "512",
+ "description": "4 vCPUs, 15 GB RAM, a 10 GB ephemeral root disk, and an extra 1770 GB ephemeral disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-4-d",
+ "memoryMb": 15360,
+ "creationTimestamp": "2012-06-07T20:50:05.677",
+ "id": "12908559991903153608",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-4-d",
+ "hostCpus": 4
+}, {
+ "guestCpus": 8,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "maximumPersistentDisksSizeGb": "1024",
+ "description": "8 vCPUs, 30 GB RAM, and a 10 GB ephemeral root disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-8",
+ "memoryMb": 30720,
+ "creationTimestamp": "2012-06-07T20:50:42.334",
+ "id": "12908560197989714867",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-8",
+ "hostCpus": 8
+}, {
+ "guestCpus": 8,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "ephemeralDisks": [{
+ "diskGb": 1770
+ }, {
+ "diskGb": 1770
+ }],
+ "maximumPersistentDisksSizeGb": "1024",
+ "description": "8 vCPUs, 30 GB RAM, a 10 GB ephemeral root disk, and 2 extra 1770 GB ephemeral disks",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-8-d",
+ "memoryMb": 30720,
+ "creationTimestamp": "2012-06-07T20:51:19.936",
+ "id": "12908560709887590691",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-8-d",
+ "hostCpus": 8
+}, {
+ "guestCpus": 1,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "maximumPersistentDisksSizeGb": "128",
+ "description": "1 vCPU, 3.75 GB RAM, and a 10 GB ephemeral root disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-1",
+ "memoryMb": 3840,
+ "creationTimestamp": "2012-06-07T20:48:14.670",
+ "id": "12907738072351752276",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-1",
+ "hostCpus": 1
+}, {
+ "guestCpus": 1,
+ "imageSpaceGb": 10,
+ "kind": "compute#machineType",
+ "ephemeralDisks": [{
+ "diskGb": 420
+ }],
+ "maximumPersistentDisksSizeGb": "128",
+ "description": "1 vCPU, 3.75 GB RAM, a 10 GB ephemeral root disk, and an extra 420 GB ephemeral disk",
+ "maximumPersistentDisks": 16,
+ "name": "n1-standard-1-d",
+ "memoryMb": 3840,
+ "creationTimestamp": "2012-06-07T20:48:34.258",
+ "id": "12908559201265214706",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-1-d",
+ "hostCpus": 1
+}]
diff --git a/libcloud/test/compute/fixtures/gce/list_machine_types_fail.json b/libcloud/test/compute/fixtures/gce/list_machine_types_fail.json
new file mode 100644
index 0000000..7001ed8
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_machine_types_fail.json
@@ -0,0 +1,15 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'projects/googler' was not found"
+ }, {
+ "domain": "global",
+ "reason": "invalidParameter",
+ "message": "Invalid unsigned integer value: 'a'.",
+ "locationType": "parameter",
+ "location": "maxResults"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/fixtures/gce/list_nodes.json b/libcloud/test/compute/fixtures/gce/list_nodes.json
new file mode 100644
index 0000000..e8f0d4c
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_nodes.json
@@ -0,0 +1,57 @@
+[{
+ "status": "RUNNING",
+ "kind": "compute#instance",
+ "machineType": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-1",
+ "description": "",
+ "zone": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central1-a",
+ "image": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-12-04-v20120621",
+ "disks": [{
+ "index": 0,
+ "kind": "compute#attachedDisk",
+ "type": "EPHEMERAL",
+ "mode": "READ_WRITE"
+ }],
+ "name": "foo",
+ "id": "12987935077537528637",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/instances/goo",
+ "networkInterfaces": [{
+ "networkIP": "10.240.125.236",
+ "kind": "compute#networkInterface",
+ "accessConfigs": [{
+ "kind": "compute#accessConfig",
+ "type": "ONE_TO_ONE_NAT",
+ "name": "External NAT",
+ "natIP": "173.255.124.43"
+ }],
+ "network": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/networks/default",
+ "name": "nic0"
+ }]
+}, {
+ "status": "RUNNING",
+ "kind": "compute#instance",
+ "machineType": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/machine-types/n1-standard-2",
+ "description": "",
+ "zone": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/us-central1-a",
+ "image": "https://www.googleapis.com/compute/v1beta12/projects/google/images/ubuntu-12-04-v20120621",
+ "disks": [{
+ "index": 0,
+ "kind": "compute#attachedDisk",
+ "type": "EPHEMERAL",
+ "mode": "READ_WRITE"
+ }],
+ "name": "bar",
+ "id": "12982866885143643382",
+ "selfLink": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/instances/hello",
+ "networkInterfaces": [{
+ "networkIP": "10.240.235.139",
+ "kind": "compute#networkInterface",
+ "accessConfigs": [{
+ "kind": "compute#accessConfig",
+ "type": "ONE_TO_ONE_NAT",
+ "name": "External NAT",
+ "natIP": "173.255.116.186"
+ }],
+ "network": "https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/networks/default",
+ "name": "nic0"
+ }]
+}]
diff --git a/libcloud/test/compute/fixtures/gce/list_nodes_fail.json b/libcloud/test/compute/fixtures/gce/list_nodes_fail.json
new file mode 100644
index 0000000..4029c97
--- /dev/null
+++ b/libcloud/test/compute/fixtures/gce/list_nodes_fail.json
@@ -0,0 +1,13 @@
+{
+ "error": {
+ "errors": [{
+ "domain": "global",
+ "reason": "notFound",
+ "message": "The resource 'google.com:ziyadm-devconsol' was not found"
+ }, {
+ "domain": "global",
+ "reason": "invalid",
+ "message": "Invalid value for field 'resource.zone': 'https://www.googleapis.com/compute/v1beta12/projects/google.com:ziyadm-devconsole/zones/hello'. Must be the URL to a Compute resource of the correct type"
+ }]
+ }
+}
diff --git a/libcloud/test/compute/test_gce.py b/libcloud/test/compute/test_gce.py
new file mode 100644
index 0000000..dca2d0d
--- /dev/null
+++ b/libcloud/test/compute/test_gce.py
@@ -0,0 +1,428 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import sys
+import unittest
+import libcloud.test.secrets as secrets
+
+from libcloud.compute.drivers.gce import GoogleComputeEngineNodeDriver
+from libcloud.test.file_fixtures import ComputeFileFixtures
+
+
+class MockAccessConfig:
+    """Stub for a gcelib access-config object.
+
+    Only exposes ``natIP`` (the external NAT address the driver reads).
+    """
+
+    def __init__(self, natIP):
+        self.natIP = natIP
+
+
+class MockImage:
+    """Stub for a gcelib image resource; a plain attribute bag mirroring
+    the fields the driver's image mapping reads."""
+
+    def __init__(self, id, selfLink, preferredKernel, description,
+                 creationTimestamp):
+        self.id = id
+        self.selfLink = selfLink
+        self.preferredKernel = preferredKernel
+        self.description = description
+        self.creationTimestamp = creationTimestamp
+
+
+class MockInstance:
+    """Stub for a gcelib instance resource; a plain attribute bag
+    mirroring the fields the driver's node mapping reads."""
+
+    def __init__(self, status, kind, machineType, description, zone, image,
+                 disks, networkInterfaces, id, selfLink, name, metadata):
+        self.status = status
+        self.kind = kind
+        self.machineType = machineType
+        self.description = description
+        self.zone = zone
+        self.image = image
+        self.disks = disks
+        self.networkInterfaces = networkInterfaces
+        self.id = id
+        self.selfLink = selfLink
+        self.name = name
+        self.metadata = metadata
+
+
+class MockLocation:
+    """Stub for a zone/location resource (id, name, country)."""
+
+    def __init__(self, id, name, country):
+        self.id = id
+        self.name = name
+        self.country = country
+
+
+class MockMachine:
+    """Stub for a machine-type resource; bandwidth/price are synthetic
+    values supplied by MockGcelibInstance, not part of the API data."""
+
+    def __init__(self, id, name, memoryMb, imageSpaceGb, bandwidth, price):
+        self.id = id
+        self.name = name
+        self.memoryMb = memoryMb
+        self.imageSpaceGb = imageSpaceGb
+        self.bandwidth = bandwidth
+        self.price = price
+
+
+class MockNetworkIP:
+    """Stub for a network interface: internal IP plus its access configs."""
+
+    def __init__(self, networkIP, accessConfigs):
+        self.networkIP = networkIP
+        self.accessConfigs = accessConfigs
+
+
+class MockSSHClient():
+    """Fake SSH client standing in for the driver's SSH dependency.
+
+    ``testTypes['reboot_instance']`` ('PASS'/'FAIL') controls whether
+    exec_command succeeds silently or raises.
+    """
+
+    def __init__(self):
+        self.testTypes = {
+            'reboot_instance': 'PASS'
+        }
+
+    def set_missing_host_key_policy(self, host_key_policy):
+        # Accepted and ignored; no host-key handling is needed in tests.
+        pass
+
+    def connect(self, host, username=None, pkey=None):
+        # No real connection is made.
+        pass
+
+    def close(self):
+        pass
+
+    def exec_command(self, command):
+        # Succeed silently on 'PASS'; raise to simulate an SSH failure.
+        if self.testTypes['reboot_instance'] == 'PASS':
+            return
+        else:
+            raise Exception
+
+
+class MockGcelibInstance:
+ fixtures = ComputeFileFixtures('gce')
+
+ def __init__(self):
+ self.testTypes = {
+ 'list_nodes': 'PASS',
+ 'list_images': 'PASS',
+ 'list_machine_types': 'PASS',
+ 'list_locations': 'PASS',
+ 'insert_instance': 'PASS',
+ 'deploy_instance': 'PASS'
+ }
+
+ def load_fixtures(self, method_name, test_type):
+ fixture_file_name = method_name
+
+ if test_type == 'FAIL':
+ fixture_file_name += '_fail'
+ fixture_file_name += '.json'
+
+ return json.loads(self.fixtures.load(fixture_file_name))
+
+ def all_instances(self):
+ method_name = 'list_nodes'
+ instance_list = self.load_fixtures(method_name,
+ self.testTypes[method_name])
+ list_mock_instances = []
+
+ for instance in instance_list:
+ if instance == 'error':
+ continue
+ else:
+ mock_network_interface = self._get_mock_network_interfaces(
+ instance)
+ mock_instance = self._to_mock_instance(
+ instance, mock_network_interface)
+ list_mock_instances.append(mock_instance)
+
+ return list_mock_instances
+
+ def list_images(self, project):
+ method_name = 'list_images'
+ image_list = self.load_fixtures(method_name,
+ self.testTypes[method_name])
+ list_mock_images = []
+
+ for image in image_list:
+ if image == 'error':
+ continue
+ else:
+ mock_image = self._to_mock_image(image)
+ list_mock_images.append(mock_image)
+
+ return list_mock_images
+
+ def list_machine_types(self):
+ method_name = 'list_machine_types'
+ machine_type_list = self.load_fixtures(method_name,
+ self.testTypes[method_name])
+ list_mock_machine_types = []
+
+ for machine in machine_type_list:
+ if machine == 'error':
+ continue
+ else:
+ mock_machine = self._to_mock_machine(machine)
+ list_mock_machine_types.append(mock_machine)
+
+ return list_mock_machine_types
+
+ def list_zones(self):
+ method_name = 'list_locations'
+ location_list = self.load_fixtures(method_name,
+ self.testTypes[method_name])
+ list_mock_locations = []
+
+ for location in location_list:
+ if location == 'error':
+ continue
+ else:
+ mock_location = self._to_mock_location(location)
+ list_mock_locations.append(mock_location)
+
+ return list_mock_locations
+
+ def get_instance(self, mock_instance):
+ if mock_instance == 'foonode2':
+ method_name = 'deploy_instance'
+ else:
+ method_name = 'insert_instance'
+ instance_data = self.load_fixtures(method_name,
+ self.testTypes[method_name])
+
+ if instance_data.get('error', None) is None:
+ mock_network_interface = self._get_mock_network_interfaces(
+ instance_data)
+ return self._to_mock_instance(instance_data,
+ mock_network_interface)
+ else:
+ return None
+
+ def insert_instance(self, name, machineType, image, zone, project,
+ metadata):
+ return
+
+ def delete_instance(self, instance):
+ list_nodes = self.all_instances()
+ node_to_destory = list_nodes[0]
+ assert node_to_destory.name == instance
+
+ if self.testTypes['delete_instance'] == 'PASS':
+ return
+ else:
+ raise Exception
+
+ def _get_mock_network_interfaces(self, mock_instance):
+ mock_network_interfaces = []
+
+ for mock_network_interface in mock_instance['networkInterfaces']:
+ mock_access_configs = []
+ for mock_access_config in mock_network_interface['accessConfigs']:
+ mock_access_configs.append(
+ MockAccessConfig(mock_access_config))
+ mock_network_interfaces.append(
+ MockNetworkIP(mock_network_interface['networkIP'],
+ mock_access_configs))
+
+ return mock_network_interfaces
+
+ def _to_mock_instance(self, mock_instance, mock_network_interfaces):
+ mock_instance.setdefault('metadata', None)
+
+ return MockInstance(mock_instance['status'], mock_instance['kind'],
+ mock_instance['machineType'], ['description'],
+ mock_instance['zone'], mock_instance['image'],
+ mock_instance['disks'], mock_network_interfaces,
+ mock_instance['id'], mock_instance['selfLink'],
+ mock_instance['name'], mock_instance['metadata'])
+
+ def _to_mock_image(self, mock_image):
+ return MockImage(mock_image['id'], mock_image['selfLink'],
+ mock_image['preferredKernel'],
+ mock_image['description'],
+ mock_image['creationTimestamp'])
+
+ def _to_mock_location(self, mock_location):
+ mock_location['country'] = 'US'
+
+ return MockLocation(mock_location['id'], mock_location['name'],
+ mock_location['country'])
+
+ def _to_mock_machine(self, mock_machine):
+ mock_machine['bandwidth'] = 0
+ mock_machine['price'] = '123'
+
+ return MockMachine(mock_machine['id'], mock_machine['name'],
+ mock_machine['memoryMb'],
+ mock_machine['imageSpaceGb'],
+ mock_machine['bandwidth'],
+ mock_machine['price'])
+
+
+# TODO(zmir): Determine if there is a way to programmatically generate all test
+# cases, and mock types, and subsequently, automate the entire testing suite
+# for gce.
+class GoogleComputeEngineTest(unittest.TestCase):
+ def setUp(self):
+ ssh_username, ssh_private_key_file, project = getattr(secrets,
+ 'GCE_PARAMS',
+ ())
+ self.driver = GoogleComputeEngineNodeDriver(ssh_username,
+ ssh_private_key_file,
+ project)
+ self.driver.SSHClient = MockSSHClient()
+ self.driver.gcelib_instance = MockGcelibInstance()
+
+ def test_list_nodes(self):
+ self.driver.gcelib_instance.testTypes['list_nodes'] = 'PASS'
+ list_nodes = self.driver.list_nodes()
+ self.assertEqual(len(list_nodes), 2)
+
+ node1, node2 = list_nodes[0], list_nodes[1]
+ self.assertEqual(node1.name, 'foo')
+ self.assertEqual(node2.name, 'bar')
+ self.assertEqual(node1.state, 0)
+ self.assertEqual(node2.state, 0)
+ self.assertEqual(node1.size.split('/')[-1], 'n1-standard-1')
+ self.assertEqual(node2.size.split('/')[-1], 'n1-standard-2')
+
+ self.driver.gcelib_instance.testTypes['list_nodes'] = 'FAIL'
+ list_nodes = self.driver.list_nodes()
+ self.assertEqual(len(list_nodes), 0)
+
+ def test_list_images(self):
+ self.driver.gcelib_instance.testTypes['list_images'] = 'PASS'
+ list_images = self.driver.list_images()
+ self.assertEqual(len(list_images), 6)
+
+ image1, image2 = list_images[0], list_images[-1]
+ self.assertEqual(image1.name.split('/')[-1], 'centos-6-2-v20120611')
+ self.assertEqual(image2.name.split('/')[-1], 'ubuntu-10-04-v20120106')
+ self.assertEqual(image1.id, '12917726455664967299')
+ self.assertEqual(image2.id, '12941196956151834933')
+
+ self.driver.gcelib_instance.testTypes['list_images'] = 'FAIL'
+ list_images = self.driver.list_images()
+ self.assertEqual(len(list_images), 0)
+
+ def test_list_sizes(self):
+ self.driver.gcelib_instance.testTypes['list_machine_types'] = 'PASS'
+ list_sizes = self.driver.list_sizes()
+ self.assertEqual(len(list_sizes), 7)
+
+ size1, size2 = list_sizes[0], list_sizes[-1]
+ self.assertEqual(size1.name, 'n1-standard-2-d')
+ self.assertEqual(size2.name, 'n1-standard-1-d')
+ self.assertEqual(size1.ram, 7680)
+ self.assertEqual(size2.ram, 3840)
+ self.assertEqual(size1.id, '12908559582417967837')
+ self.assertEqual(size2.id, '12908559201265214706')
+
+ self.driver.gcelib_instance.testTypes['list_machine_types'] = 'FAIL'
+ list_sizes = self.driver.list_sizes()
+ self.assertEqual(len(list_sizes), 0)
+
+ def test_list_locations(self):
+ self.driver.gcelib_instance.testTypes['list_machine_types'] = 'PASS'
+ list_locations = self.driver.list_locations()
+ self.assertEqual(len(list_locations), 2)
+
+ location1, location2 = list_locations
+ self.assertEqual(location1.name, 'us-central1-a')
+ self.assertEqual(location2.name, 'us-central2-a')
+ self.assertEqual(location1.id, '12889558432979476247')
+ self.assertEqual(location2.id, '12889559460378820818')
+ self.assertEqual(location1.country, 'US')
+ self.assertEqual(location2.country, 'US')
+
+ self.driver.gcelib_instance.testTypes['list_locations'] = 'FAIL'
+ list_locations = self.driver.list_locations()
+ self.assertEqual(len(list_locations), 0)
+
+ def test_create_node(self):
+ node_name = 'foonode'
+ node_size = self.driver.list_sizes()[0]
+ node_image = self.driver.list_images()[0]
+ node_location = self.driver.list_locations()[0]
+
+ self.driver.gcelib_instance.testTypes['insert_instance'] = 'PASS'
+ new_node = self.driver.create_node(node_name, node_size, node_image,
+ node_location)
+
+ new_node_image = new_node.image.split('/')[-1]
+ new_node_zone = new_node.extra['zone'].split('/')[-1]
+ new_node_machine_type = new_node.extra['machineType'].split('/')[-1]
+
+ self.assertEqual(new_node.name, 'foonode')
+ self.assertEqual(new_node.id, '12989505666010310007')
+ self.assertEqual(new_node.state, 0)
+ self.assertEqual(new_node_image, 'centos-6-2-v20120326')
+ self.assertEqual(new_node_zone, 'us-central1-a')
+ self.assertEqual(new_node_machine_type, 'n1-standard-1')
+
+ self.driver.gcelib_instance.testTypes['insert_instance'] = 'FAIL'
+ new_node = self.driver.create_node(node_name, node_size, node_image,
+ node_location)
+ self.assertEqual(new_node, None)
+
+ def test_reboot_node(self):
+ self.driver.SSHClient.testTypes['reboot_instance'] = 'PASS'
+ node_to_reboot = self.driver.list_nodes()[0]
+ ret = self.driver.reboot_node(node_to_reboot)
+ self.assertTrue(ret)
+
+ self.driver.SSHClient.testTypes['reboot_instance'] = 'FAIL'
+ node_to_reboot = self.driver.list_nodes()[0]
+ ret = self.driver.reboot_node(node_to_reboot)
+ self.assertFalse(ret)
+
+ def test_deploy_node(self):
+ node_name = 'foonode2'
+ node_size = self.driver.list_sizes()[0]
+ node_image = self.driver.list_images()[0]
+ node_location = self.driver.list_locations()[0]
+ script_location = \
+ 'libcloud/test/compute/fixtures/gce/install-apache.sh'
+
+ self.driver.gcelib_instance.testTypes['deploy_instance'] = 'PASS'
+ new_node = self.driver.deploy_node(node_name,
+ node_size,
+ node_image,
+ node_location,
+ script_location)
+
+ new_node_image = new_node.image.split('/')[-1]
+ new_node_zone = new_node.extra['zone'].split('/')[-1]
+ new_node_machine_type = new_node.extra['machineType'].split('/')[-1]
+
+ self.assertEqual(new_node.name, 'foonode2')
+ self.assertEqual(new_node.id, '12990402818933463403')
+ self.assertEqual(new_node.state, 0)
+ self.assertEqual(new_node_image, 'centos-6-2-v20120326')
+ self.assertEqual(new_node_zone, 'us-central1-a')
+ self.assertEqual(new_node_machine_type, 'n1-standard-1')
+
+ self.driver.gcelib_instance.testTypes['deploy_instance'] = 'FAIL'
+ new_node = self.driver.deploy_node(node_name,
+ node_size,
+ node_image,
+ node_location,
+ script_location)
+ self.assertEqual(new_node, None)
+
+ def test_destroy_node(self):
+ list_nodes = self.driver.list_nodes()
+ node_to_destroy = list_nodes[0]
+
+ self.driver.gcelib_instance.testTypes['delete_instance'] = 'PASS'
+ ret = self.driver.destroy_node(node_to_destroy)
+ self.assertTrue(ret)
+
+ self.driver.gcelib_instance.testTypes['delete_instance'] = 'FAIL'
+ ret = self.driver.destroy_node(node_to_destroy)
+ self.assertFalse(ret)
+
+# Allow running this module directly; propagate unittest's exit status.
+if __name__ == '__main__':
+    sys.exit(unittest.main())
diff --git a/libcloud/common/gcelib/__init__.py b/libcloud/common/gcelib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/libcloud/common/gcelib/gce.py b/libcloud/common/gcelib/gce.py
new file mode 100644
index 0000000..b4bbef6
--- /dev/null
+++ b/libcloud/common/gcelib/gce.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains a factory function for constructing GoogleComputeEngine objects."""
+
+import logging
+
+VERSIONS = ('v1beta12',)
+DEFAULT_VERSION = 'v1beta12'
+
+
+def get_api(
+    credentials,
+    logging_level=logging.WARN,
+    base_url=None,
+    default_image=None,
+    default_machine_type=None,
+    default_network='default',
+    default_network_interface=None,
+    default_project=None,
+    default_zone=None,
+    version=DEFAULT_VERSION):
+  """Returns a new GoogleComputeEngine object based on the given version.
+
+  All keyword arguments are forwarded unchanged to the version-specific
+  GoogleComputeEngine constructor.
+
+  Raises:
+    ValueError: If version is not one of VERSIONS.
+  """
+
+  if version not in VERSIONS:
+    raise ValueError('Could not recognize given version: {0}'.format(version))
+
+  # Lazily import the generated per-version module (gce_v1beta12, ...).
+  # NOTE(review): level=-1 requests an implicit relative import, which is
+  # Python 2 only.
+  gce_library = __import__('gce_' + version, globals(), locals(), [], -1)
+  return gce_library.GoogleComputeEngine(
+      credentials=credentials,
+      logging_level=logging_level,
+      base_url=base_url,
+      default_image=default_image,
+      default_machine_type=default_machine_type,
+      default_network=default_network,
+      default_network_interface=default_network_interface,
+      default_project=default_project,
+      default_zone=default_zone)
diff --git a/libcloud/common/gcelib/gce_base.py b/libcloud/common/gcelib/gce_base.py
new file mode 100644
index 0000000..f83cc3b
--- /dev/null
+++ b/libcloud/common/gcelib/gce_base.py
@@ -0,0 +1,918 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The base class definition for the generated GoogleComputeEngine class."""
+
+import collections
+import cStringIO
+from email.feedparser import FeedParser
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+import itertools
+import json
+import logging
+import re
+import time
+import urllib
+import urlparse
+
+import httplib2
+from gcelib import shortcuts
+
+LOG_FORMAT = '{start_bold}%(asctime)s{reset_colors} - %(message)s'.format(
+ start_bold='\033[1m', reset_colors='\033[0m')
+
+BASE_URL_VALIDATORS = [
+ re.compile('https://www\.googleapis\.com/compute/[a-z0-9_]+/projects/?')
+]
+
+RESPONSE_ID_REGEX = re.compile('<response-([^>]*)>')
+
+DEFAULT_BASE_URL = 'https://www.googleapis.com/compute/v1beta12/projects/'
+
+# The maximum amount of time that should be spent polling an Operation
+# object before giving up.
+TIMEOUT_SECS = 60
+BATCH_TIMEOUT_SECS = 300
+
+# The maximum number of individual requests comprising one batch. Bigger batches
+# will be split up into smaller payloads to send up to the server.
+MAX_BATCH_SIZE = 1000
+
+
+class GoogleComputeEngineBase(object):
+ """The base class from which the generated code derives."""
+
+ _SELF_LINK_REGEX = re.compile(
+ 'https?://[^/]+/compute/[^/]+/projects/([^/]+)(?:/.*)?')
+
+ API_REQUEST = collections.namedtuple(
+ 'ApiRequest', ('method', 'url', 'query', 'body'))
+
+ BATCH_RESPONSE = collections.namedtuple('BatchResponse', ('response', 'body'))
+
+  def __init__(self, credentials,
+               logging_level=logging.WARN,
+               base_url=None,
+               default_image=None,
+               default_machine_type=None,
+               default_network='default',
+               default_network_interface=None,
+               default_project=None,
+               default_zone=None):
+    """Base class constructor.
+
+    Args:
+      credentials: A OAuth2Credentials object that contains the
+        client's credentials.
+      logging_level: The verbosity of the log messages as defined
+        in the logging module.
+      base_url: The base URL to which REST requests can be made. This
+        should not be changed.
+      default_image: The name of the default image. This value can be
+        overwritten by the different API calls.
+      default_machine_type: The name of the default machine type. This
+        value can be overwritten by the different API calls.
+      default_network: The default network. This value can be overwritten
+        by the different API calls.
+      default_network_interface: The default network interface. This
+        value can be overwritten by the different API calls.
+      default_project: The name of the default project. This value can
+        be overwritten by the different API calls.
+      default_zone: The name of the default zone. This value can be
+        overwritten by the different API calls.
+
+    Raises:
+      ValueError: When an invalid base_url is provided.
+    """
+    self.credentials = credentials
+    # Base-URL resolution order: explicit argument, then a subclass
+    # BASE_URL constant, then the library-wide default.
+    if base_url is None and hasattr(self, 'BASE_URL'):
+      base_url = self.BASE_URL
+    if base_url is None:
+      base_url = DEFAULT_BASE_URL
+
+    GoogleComputeEngineBase._check_url(base_url)
+
+    self.base_url = base_url.rstrip('/')
+    self.logger = logging.getLogger('GoogleComputeEngine')
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter(LOG_FORMAT))
+    self.logger.addHandler(handler)
+    self.logger.setLevel(logging_level)
+
+    self.default_image = default_image
+    self.default_machine_type = default_machine_type
+    self.default_network = default_network
+    # Fall back to a minimal interface spec for the chosen network when
+    # the caller does not supply one explicitly.
+    self.default_network_interface = (default_network_interface or
+                                      shortcuts.network(default_network))
+    self.default_project = default_project
+    self.default_zone = default_zone
+
+  # Trivial accessors for the per-call default values. The deleters
+  # restore the library-wide fallbacks ('default' network, a fresh
+  # shortcuts.network() interface, None elsewhere) rather than removing
+  # the underlying attributes.
+  @property
+  def default_image(self):
+    return self._default_image
+
+  @property
+  def default_machine_type(self):
+    return self._default_machine_type
+
+  @property
+  def default_network(self):
+    return self._default_network
+
+  @property
+  def default_network_interface(self):
+    return self._default_network_interface
+
+  @property
+  def default_project(self):
+    return self._default_project
+
+  @property
+  def default_zone(self):
+    return self._default_zone
+
+  @default_image.setter
+  def default_image(self, value):
+    self._default_image = value
+
+  @default_machine_type.setter
+  def default_machine_type(self, value):
+    self._default_machine_type = value
+
+  @default_network.setter
+  def default_network(self, value):
+    self._default_network = value
+
+  @default_network_interface.setter
+  def default_network_interface(self, value):
+    self._default_network_interface = value
+
+  @default_project.setter
+  def default_project(self, value):
+    self._default_project = value
+
+  @default_zone.setter
+  def default_zone(self, value):
+    self._default_zone = value
+
+  @default_image.deleter
+  def default_image(self):
+    self._default_image = None
+
+  @default_machine_type.deleter
+  def default_machine_type(self):
+    self._default_machine_type = None
+
+  @default_network.deleter
+  def default_network(self):
+    self._default_network = 'default'
+
+  @default_network_interface.deleter
+  def default_network_interface(self):
+    self._default_network_interface = shortcuts.network()
+
+  @default_project.deleter
+  def default_project(self):
+    self._default_project = None
+
+  @default_zone.deleter
+  def default_zone(self):
+    self._default_zone = None
+
+  def _normalize(self, project, kind, resource):
+    """Normalizes the URI for the given resource.
+
+    A normalized resource URI contains the base URI, project
+    identifier, and the resource identifier.
+
+    Args:
+      project: The name of the project.
+      kind: The type of the resource (e.g., disks, images).
+      resource: The name of the resource or the resource's URI.
+
+    Returns:
+      The URI to the given resource.
+    """
+    # Already an absolute URI under this API root.
+    if resource.startswith(self.base_url):
+      return resource
+
+    # 'projects/<rest>' -> '<base_url>/<rest>' (len('projects/') == 9).
+    if resource.startswith('projects/'):
+      return '/'.join((self.base_url, resource[9:]))
+
+    # Same with a leading slash (len('/projects/') == 10).
+    if resource.startswith('/projects/'):
+      return '/'.join((self.base_url, resource[10:]))
+
+    # '<kind>/<name>' relative to the given project.
+    if resource.startswith(kind + '/'):
+      return '/'.join((self.base_url, project, resource))
+
+    # Bare resource name.
+    return '/'.join((self.base_url, project, kind, resource))
+
+  @staticmethod
+  def _create_url_query(query):
+    """Creates a url query component from a dictionary of (key, value) pairs."""
+    # Values are percent-encoded via quote_plus; keys are assumed URL-safe.
+    # An empty or None query yields ''.
+    return '&'.join('{0}={1}'.format(key, urllib.quote_plus(str(value)))
+                    for key, value in (query or {}).iteritems())
+
+  def _sleep(self, seconds):
+    """Sleeps for specified number of seconds. Can be overriden by tests."""
+    time.sleep(seconds)
+
+  def _send_request(self, path, method='GET', request_body=None,
+                    content_type='application/json'):
+    """Send an API request to the server.
+
+    Returns a (response, data) tuple; data is None for bodyless 2xx
+    statuses. Refreshes OAuth credentials and retries on 401; raises
+    ValueError for any other non-2xx status.
+    """
+    # NOTE(review): a persistently failing 401 loops forever here —
+    # there is no cap on credential-refresh retries.
+    while True:
+      headers = {'authorization': 'OAuth ' + self.credentials.access_token}
+      if request_body:
+        headers['content-type'] = content_type
+
+      self.logger.info('Sending {0} request to {1}.'.format(method, path))
+      if request_body:
+        self.logger.debug('Request body: {0}'.format(request_body))
+
+      response, data = httplib2.Http().request(
+          path, method, request_body, headers)
+
+      self.logger.debug('Received response: {0}'.format(response))
+      if data:
+        self.logger.debug('Response body: {0}'.format(data))
+
+      if response.status == 200:
+        return response, data
+      elif 200 < response.status <= 299:
+        # Success without a meaningful body (e.g. 204 No Content).
+        return response, None
+      elif response.status == 401:
+        self.credentials.refresh(httplib2.Http())
+      else:
+        err = 'Received response code {0} for {1} on {2}.'.format(
+            response.status, method, path)
+        if data:
+          err = '{0}\nResponse body: {1}'.format(err, data)
+        raise ValueError(err)
+
+  def _send_request_json(self, path, method='GET', body=None):
+    """Sends a request to the server and interprets response body as JSON."""
+    _, data = self._send_request(path, method, body)
+    # Bodyless 2xx responses fall through and implicitly return None.
+    if data is not None:
+      return json.loads(data)
+
+  def _execute(self, request, blocking=True, parse=True):
+    """Calls the Google Compute Engine backend with the given parameters.
+
+    Args:
+      request: An instance of API_REQUEST named tuple describing the request.
+      blocking: Wait for an asynchronous operation to complete before
+        returning.
+      parse: If True, parse the resulting JSON into an object representation.
+
+    Raises:
+      ValueError: If there is a problem making the request or the given
+        uri is mal-formed.
+
+    Returns:
+      A dict containing the response.
+    """
+    # Rebuild the full URL from the base URL plus the request's relative
+    # path and its query dictionary.
+    base = urlparse.urlsplit(self.base_url)
+    path = urlparse.urlunsplit(
+        (base.scheme, base.netloc,
+         '{0}/{1}'.format(base.path.rstrip('/'), request.url),
+         GoogleComputeEngineBase._create_url_query(request.query), ''))
+
+    GoogleComputeEngineBase._check_url(path)
+    result = self._send_request_json(path, request.method, request.body)
+    if blocking:
+      # Poll the returned Operation until it reaches DONE.
+      result = self._wait_for(operation=result)
+    if parse:
+      result = self._parse(result)
+    return result
+
+  def _wait_for(self, operation, timeout_secs=TIMEOUT_SECS):
+    """Blocks until the given operation's status is DONE.
+
+    Args:
+      operation: The operation to poll. This should be a dict
+        corresponding to an Operation resource.
+      timeout_secs: The maximum amount of time this method will
+        wait for completion of an operation.
+
+    Raises:
+      ValueError: If the timeout expires or the given resource is
+        not an Operation.
+
+    Returns:
+      For non-delete operations, a tuple where the first element is a
+      dict corresponding to the Operation and the second element is
+      the object that was mutated. Deletes return just the operation.
+    """
+    if operation.get('kind') != 'compute#operation':
+      raise ValueError('Only objects of type Operation can be polled.')
+
+    self_link = operation.get('selfLink')
+    if not self_link:
+      raise ValueError('Invalid selfLink.')
+
+    timeout = _TimeoutChecker(timeout_secs)
+    delay = 0.0
+    while True:
+      self.logger.debug('Polling operation {0}...'.format(operation.get('id')))
+      operation = self._send_request_json(self_link)
+
+      if timeout.check_timeout():
+        raise ValueError('Polling timed out.')
+
+      if operation.get('status') == 'DONE':
+        break
+
+      # Exponential backoff, clamped to the range [1.0, 5.0] seconds.
+      delay = min(max(delay * 1.5, 1.0), 5.0)
+      self.logger.debug('Operation has not completed. Polling again in {0} '
+                        'seconds.'.format(delay))
+      self._sleep(delay)
+
+    self.logger.info('Operation is done.')
+
+    # Deletes leave no surviving target resource to fetch.
+    if operation.get('operationType') == 'delete':
+      return operation
+
+    mutated_object = None
+    if 'error' not in operation:
+      target_link = operation.get('targetLink')
+      mutated_object = self._send_request_json(target_link)
+
+    return (operation, mutated_object)
+
+  def _wait_for_list(self, operations, timeout_secs=BATCH_TIMEOUT_SECS):
+    """Waits for completion of batch of asynchronous operations.
+
+    Keeps track of incomplete operations. Requests operation status in batches.
+    As operations transition to "DONE" state it will resolve the operation
+    target resource and include that in the response.
+
+    In case of errors, inserts an error object into the result.
+
+    Args:
+      operations: list of operations to await completion.
+      timeout_secs: timeout in seconds. Infinite if value <= 0.
+
+    Returns:
+      List of resolved resources that the operations created/modified, or None
+      if objects were deleted.
+    """
+    # Results are constructed as list of tuples (index, value) where index is
+    # the index in the original list of operations. At the end they are sorted
+    # and the indices are thrown away.
+    results = []
+
+    operations = list(operations)
+    op_indices = range(len(operations))  # Parallel list with indices for sort.
+
+    delay = 0.0
+    timeout = _TimeoutChecker(timeout_secs)
+
+    while operations:
+      requests = []
+      request_indices = []
+
+      # Checked once per round so every pending operation in this round
+      # is treated uniformly on timeout.
+      timeout_occurred = timeout.check_timeout()
+
+      for index, operation in itertools.izip(op_indices, operations):
+        kind = operation.get('kind')
+        if kind is None:
+          results.append((index, GoogleComputeEngineError(
+              400, 'Server returned invalid resource.', operation)))
+          continue
+
+        if kind != 'compute#operation':
+          # We just resolved the resource, append it to the result.
+          results.append((index, operation))
+          continue
+
+        if operation.get('status') != 'DONE':
+          # Still waiting for the operation.
+          request_url = operation.get('selfLink')
+          if not request_url:
+            results.append((index, GoogleComputeEngineError(
+                400, 'Operation is missing selfLink property.', operation)))
+            continue
+        else:
+          # Operation is done, check for error status and schedule fetching of
+          # the completed resource.
+          if 'error' in operation:
+            results.append((index, GoogleComputeEngineError(
+                operation.get('httpErrorStatusCode', 400),
+                operation.get('httpErrorMessage', 'Unknown error'),
+                operation)))
+            continue
+
+          request_url = operation.get('targetLink')
+          if operation.get('operationType') == 'delete' or not request_url:
+            results.append((index, None))
+            continue
+
+        if timeout_occurred:
+          results.append((index, GoogleComputeEngineError(
+              400, 'Asynchronous operation timeout.', operation)))
+        else:
+          requests.append(GoogleComputeEngineBase.API_REQUEST(
+              'GET', request_url, None, None))
+          request_indices.append(index)
+
+      if not requests:
+        break  # No more requests to send up to the server.
+
+      # Delay before sending the request.
+      # Exponential backoff, clamped to the range [1.0, 5.0] seconds.
+      delay = min(max(delay * 1.5, 1.0), 5.0)
+      self._sleep(delay)
+
+      # Send the requests to the server as a batch.
+      responses = self._execute_batch_request(requests)
+
+      # Process the responses.
+      resources = []
+      resource_indices = []
+      for index, response in itertools.izip(request_indices, responses):
+        if response.response.status >= 300:
+          results.append((index, GoogleComputeEngineError(
+              response.response.status,
+              response.response.reason,
+              response.body)))
+          continue
+
+        if not response.body:
+          results.append((index, GoogleComputeEngineError(
+              400, 'Server returned an empty resource as a response.', None)))
+          continue
+
+        try:
+          resource = json.loads(response.body)
+        except ValueError:
+          results.append((index, GoogleComputeEngineError(
+              400, 'Server returned invalid JSON response.',
+              response.body)))
+          continue
+
+        resources.append(resource)
+        resource_indices.append(index)
+
+      # Next round polls whatever the batch returned (operations still
+      # pending, or freshly resolved resources).
+      operations = resources
+      op_indices = resource_indices
+
+    results.sort(key=lambda r: r[0])
+    return [r[1] for r in results]
+
+  def _parse_list(self, resources):
+    """Parses a list of resources returned from the server.
+
+    Args:
+      resources: iterable of resource dicts (or error objects, which _parse
+        passes through unchanged).
+
+    Returns:
+      List with one entry per input; entries that cannot be parsed are
+      replaced by a GoogleComputeEngineError.
+    """
+    results = []
+    for r in resources:
+      try:
+        result = self._parse(r)
+      except (KeyError, ValueError):
+        result = GoogleComputeEngineError(
+            400, 'Invalid resource returned by the server.', r)
+      results.append(result)
+    return results
+
+  def _generate(self, method, uri, query_params, result_type):
+    """Generates all resources described by the given parameters.
+
+    This method makes the list methods easier to use by taking care of
+    paging under the covers (since each list method can return at most
+    100 resources).
+
+    Args:
+      method: The method to use to fetch the resources.
+      uri: The location of the resources.
+      query_params: Query parameters that can be used to filter the
+        results.
+      result_type: The type of the resource.
+
+    Yields:
+      One resource at a time.
+    """
+    # Copy so the pageToken bookkeeping below never mutates the caller's dict.
+    query_params = dict(query_params)
+    request = GoogleComputeEngineBase.API_REQUEST(
+        method, uri, query_params, None)
+    while True:
+      result = self._execute(request, False, False)
+      items = result.get('items')
+      next_page_token = result.get('nextPageToken')
+      if not items:
+        break
+      for item in items:
+        if result_type:
+          item = result_type.from_json(item, self)
+        yield item
+      if not next_page_token:
+        break
+      query_params['pageToken'] = next_page_token
+
+  def _get_parsers(self):
+    """Returns a dict that maps resource types to parsing functions.
+
+    The resource types should be strings that identify a resource
+    (e.g., 'compute#instance') and the parsing functions should
+    construct an object defined in the generated code from a resource
+    dict.
+
+    Raises:
+      NotImplementedError: Always; subclasses must override this hook.
+    """
+    raise NotImplementedError('_get_parsers() must be implemented by subclass.')
+
+  def _parse(self, val):
+    """Parses the given val (a dict) into a resource object.
+
+    Args:
+      val: A dict representing a resource. The dict must contain a
+        'kind' key that specifies the type of the resource (e.g.,
+        'compute#instance'). If val is a two-element tuple, both
+        elements in the tuple are converted.
+
+    Raises:
+      ValueError: If val is a tuple, but does not contain exactly two
+        elements.
+      KeyError: If any of the dicts passed in do not contain the key
+        'kind' or if no conversion function exists for the given kind.
+
+    Returns:
+      An object that corresponds to the type of val if val is not a
+      tuple, a two-element tuple that contains two objects
+      corresponding to the elements of the tuple if val is a tuple,
+      None if val is None, or val if val is an exception.
+    """
+    if val is None:
+      return None
+    if isinstance(val, BaseException):
+      return val
+    if isinstance(val, tuple):
+      if len(val) != 2:
+        raise ValueError('Expected two-element tuple.')
+      return (self._parse(val[0]), self._parse(val[1]))
+
+    # Anything else must be a resource dict carrying a 'kind' discriminator
+    # that selects the parser registered by the subclass.
+    kind = val.get('kind')
+    if not kind:
+      raise KeyError('No kind attribute found in input.')
+
+    func = self._get_parsers().get(kind)
+    if func is None:
+      raise KeyError('No conversion function found.')
+    return func(val, self)
+
+  @staticmethod
+  def _parse_project(self_link):
+    """Extracts project name from the absolute URL of selfLink property.
+
+    Args:
+      self_link: absolute resource URL (a selfLink value), or None.
+
+    Returns:
+      The project name if the URL matches _SELF_LINK_REGEX, otherwise None.
+    """
+    if self_link is not None:
+      match = GoogleComputeEngineBase._SELF_LINK_REGEX.match(self_link)
+      if match:
+        return match.group(1)
+    return None
+
+  @staticmethod
+  def _combine(list1, list2):
+    """Combines two sequences much like izip, allowing either to be None.
+
+    Args:
+      list1: first sequence, or None.
+      list2: second sequence, or None.
+
+    Returns:
+      Iterable of pairs; a None input is padded with None values. An empty
+      list if both inputs are None.
+
+    Raises:
+      ValueError: If both sequences are given but differ in length.
+    """
+    if list1 is not None:
+      if list2 is not None:
+        # This won't work for inputs being generators. Consider allowing them.
+        if len(list1) != len(list2):
+          raise ValueError('List of objects and names must be equal length')
+        return itertools.izip(list1, list2)
+      else:
+        return itertools.izip(list1, itertools.repeat(None))
+    elif list2 is not None:
+      return itertools.izip(itertools.repeat(None), list2)
+    else:
+      return []
+
+  def _serialize_batch_api_request(self, base_path, request):
+    """Serializes one API request into an individual MIME part.
+
+    The MIME part becomes part of the larger multipart MIME message. Each
+    individual part contains a single API request in the format:
+
+    VERB url
+    [Content-Type: application/json
+    Content-Length: <length>
+
+    JSON payload]
+
+    The section enclosed in [] is only present for requests with JSON payload.
+
+    Args:
+      base_path: The API service base path.
+      request: An instance of GoogleComputeEngineBase.API_REQUEST named tuple.
+
+    Returns:
+      string containing serialized HTTP request to be assembled into MIME
+      multipart HTTP request.
+    """
+    # Absolute URLs (e.g. operation selfLinks) already carry the full service
+    # path; relative URLs are joined onto the API base path.
+    request_url_split = urlparse.urlsplit(request.url)
+    if request_url_split.netloc:
+      url_path = request_url_split.path
+    else:
+      url_path = '{0}/{1}'.format(base_path, request.url)
+    url = urlparse.urlunsplit((
+        None, None, url_path,
+        GoogleComputeEngineBase._create_url_query(request.query), ''))
+
+    result = '{0} {1}\n'.format(request.method, url.encode('utf-8'))
+    if request.body:
+      body = cStringIO.StringIO()
+      body.write(result)
+      body.write('Content-Type: application/json\n')
+      body.write('Content-Length: {0}\n\n'.format(len(request.body)))
+      body.write(request.body)
+      result = body.getvalue()
+    return result
+
+  def _parse_batch_api_response(self, response):
+    """Parses an individual part of the MIME multipart server response.
+
+    Args:
+      response: One part of the MIME multipart message, string.
+    Raises:
+      ValueError: if an invalid HTTP header is encountered.
+    Returns:
+      An instance of GoogleComputeEngineBase.BATCH_RESPONSE named tuple.
+    """
+    # Split off the HTTP status line ('HTTP/1.1 200 OK') from the headers/body.
+    status, payload = response.split('\n', 1)
+    split = status.split(None, 2)
+    if len(split) > 1:
+      status = split[1]
+      reason = split[2] if len(split) > 2 else ''
+    else:
+      raise ValueError('Invalid HTTP server response.')
+
+    # Let FeedParser separate headers from payload; inject the status as a
+    # pseudo-header so httplib2.Response can consume it.
+    parser = FeedParser()
+    parser.feed(payload)
+    msg = parser.close()
+    msg['status'] = status
+    http_response = httplib2.Response(msg)
+    http_response.reason = reason
+    payload = msg.get_payload()
+    return GoogleComputeEngineBase.BATCH_RESPONSE(http_response, payload)
+
+  def _send_batch_request(self, requests):
+    """Sends a batch of requests to the server and processes the HTTP responses.
+
+    Args:
+      requests: List of GoogleComputeEngineBase.API_REQUEST named tuples. Must
+        contain <= MAX_BATCH_SIZE elements.
+
+    Raises:
+      ValueError: If requests has more than MAX_BATCH_SIZE elements.
+
+    Returns:
+      List of GoogleComputeEngineBase.BATCH_RESPONSE named tuples, one for
+      each element of request parameter.
+    """
+    if len(requests) > MAX_BATCH_SIZE:
+      raise ValueError('Too many requests provided'
+                       '(maximum is {0})'.format(MAX_BATCH_SIZE))
+
+    # Each request becomes one application/http part tagged with a Content-ID
+    # that records its position in the batch.
+    batch = _BatchApiRequest()
+    base = urlparse.urlsplit(self.base_url)
+    base_path = base.path.rstrip('/')
+    for i, request in enumerate(requests):
+      msg = MIMENonMultipart('application', 'http')
+      msg.add_header('Content-ID', '<{0}>'.format(i))
+      msg.set_payload(self._serialize_batch_api_request(base_path, request))
+      batch.attach(msg)
+
+    batch_string = batch.as_string()
+    content_type = 'multipart/mixed; boundary="{0}"'.format(
+        batch.get_boundary())
+
+    response, data = self._send_request(
+        urlparse.urlunsplit((base.scheme, base.netloc, 'batch', None, None)),
+        'POST', batch_string, content_type)
+
+    if response.status >= 300:
+      error = GoogleComputeEngineError(response.status, response.reason, data)
+      return [error] * len(requests)  # Return all errors.
+    elif not data:
+      error = GoogleComputeEngineError(400, 'Server returned no data', '')
+      return [error] * len(requests)  # Return all errors.
+
+    # Process successful response.
+    # Prepend the outer content-type header so FeedParser can parse the
+    # multipart body it describes.
+    data = 'content-type: {0}\r\n\r\n'.format(response['content-type']) + data
+    parser = FeedParser()
+    parser.feed(data)
+    response = parser.close()
+
+    # Parts may arrive in any order; the Content-ID carries the index of the
+    # originating request, so sorting restores the request order.
+    responses = []
+    for part in response.get_payload():
+      responses.append((
+          int(RESPONSE_ID_REGEX.match(part['Content-ID']).group(1)),
+          self._parse_batch_api_response(part.get_payload())))
+
+    responses.sort(key=lambda r: r[0])
+    return [r[1] for r in responses]
+
+  def _execute_batch_request(self, requests, batch_size=MAX_BATCH_SIZE):
+    """Executes a batch request.
+
+    The server imposes a limit on how many requests can be in the batch.
+    This function will split up requests into smaller batches if needed.
+    If any request from the batch fails with expired access token, credentials
+    will be refreshed and the failed request will be retried.
+
+    Args:
+      requests: iterable of GoogleComputeEngineBase.API_REQUEST objects.
+      batch_size: size of the individual batch to send to the server.
+
+    Returns:
+      List of responses from the server - instances of
+      GoogleComputeEngineBase.BATCH_RESPONSE named tuples.
+    """
+    batch_size = min(batch_size, MAX_BATCH_SIZE)
+    requests = list(requests)
+    responses = []
+
+    while requests:
+      # Take only the first batch_size requests.
+      batch_request = requests[:batch_size]
+      requests = requests[batch_size:]
+
+      batch_response = self._send_batch_request(batch_request)
+
+      # A 401 means the access token expired mid-batch: refresh credentials
+      # once and retry only the failed requests, splicing the retried
+      # responses back into their original positions.
+      redo_indices = [i for i, r in enumerate(batch_response)
+                      if r.response.status == 401]
+      if redo_indices:
+        redo_requests = [batch_request[i] for i in redo_indices]
+        self.credentials.refresh(httplib2.Http())
+        redo_responses = self._send_batch_request(redo_requests)
+        for i, r in enumerate(redo_responses):
+          batch_response[redo_indices[i]] = r
+
+      responses.extend(batch_response)
+
+    return responses
+
+  def _execute_list(self, requests, blocking=True, parse=True):
+    """Executes list of API requests.
+
+    Args:
+      requests: Iterable of request, each is an instance of API_REQUEST named
+        tuple defined above.
+      blocking: Wait for asynchronous operations to complete before returning.
+      parse: If True, parse the resulting JSON into an object representation.
+
+    Returns:
+      List of response objects, one per request, in request order. Responses
+      are parsed unless parse=False; failed requests are represented by
+      GoogleComputeEngineError entries.
+    """
+    responses = self._execute_batch_request(requests)
+    # Extract successful responses, process errors
+    success_indices = []
+    for i, r in enumerate(responses):
+      if 200 <= r.response.status <= 299:
+        success_indices.append(i)
+        response_json = None
+        if r.body:
+          try:
+            response_json = json.loads(r.body)
+          except ValueError:
+            pass
+        responses[i] = response_json
+      else:
+        responses[i] = GoogleComputeEngineError(
+            r.response.status, r.response.reason, r.body)
+
+    if success_indices:
+      successes = [responses[i] for i in success_indices]
+      if blocking:
+        successes = self._wait_for_list(successes)
+      if parse:
+        successes = self._parse_list(successes)
+
+      # Splice the processed successes back into their original slots.
+      for i, s in enumerate(successes):
+        responses[success_indices[i]] = s
+
+    return responses
+
+  @staticmethod
+  def _check_url(url):
+    """Ensures that the given URL conforms to the expected API URL.
+
+    Args:
+      url: The URL to check.
+
+    Raises:
+      ValueError: If the base URL is malformed.
+    """
+    # Accept the URL as soon as any whitelisted pattern matches.
+    for validator in BASE_URL_VALIDATORS:
+      if validator.match(url):
+        return
+
+    raise ValueError(
+        'Invalid base URL. '
+        'The URL {0} must match one of the following patterns: ({1})'.format(
+            repr(url),
+            ', '.join([repr(v.pattern) for v in BASE_URL_VALIDATORS])))
+
+  @staticmethod
+  def _strings_to_json(value):
+    """Serializes iterable of strings to list. Promotes string to list.
+
+    Args:
+      value: the value to convert to list of strings. It can be an iterable of
+        strings or an individual string, in which case it is promoted to list.
+    Returns:
+      List of strings.
+    Raises:
+      ValueError: If the value is None.
+    """
+    if value is None:
+      raise ValueError('strings cannot be None.')
+    elif isinstance(value, basestring):
+      # A bare string would otherwise be iterated character by character.
+      return [value]
+    else:
+      return list(value)
+
+  @staticmethod
+  def _json_to_strings(value):
+    """Deserializes list of strings from json.
+
+    Used by the generated code to parse the list of string values. Basically
+    only creates copy of the list but tolerates None.
+
+    Input: ['value1', 'value2', ... 'valueN'], or None
+
+    Args:
+      value: The list deserialized from json.
+    Returns:
+      List of strings extracted from the json list, or None if value is None.
+    """
+    return None if value is None else list(value)
+
+
+class ListObjectBase(object):
+ """Common base class for all classes representing lists of objects."""
+ __slots__ = ['__items']
+
+ def __init__(self, items):
+ self.__items = items
+
+ def __iter__(self):
+ if self.__items is not None:
+ for i in self.__items:
+ yield i
+
+ def __len__(self):
+ return len(self.__items) if self.__items is not None else 0
+
+ def __getitem__(self, index):
+ return self.items.__getitem__(index)
+
+
+class GoogleComputeEngineError(Exception):
+  """The error returned by the batch operations.
+
+  In addition to being an exception so it can be raised, it also has False
+  boolean value so that it can be easily detected.
+  """
+  __slots__ = ('status', 'reason', 'body')
+
+  def __init__(self, status, reason, body):
+    """Initializes the error.
+
+    Args:
+      status: HTTP status code associated with the error.
+      reason: Short human-readable reason; also used as the exception message.
+      body: Raw response body (or resource dict) that accompanied the error.
+    """
+    Exception.__init__(self, reason)
+    self.status = status
+    self.reason = reason
+    self.body = body
+
+  def __nonzero__(self):
+    # Python 2 truth protocol: error objects evaluate as False so callers can
+    # detect them with a simple truth test, per the class docstring.
+    return False
+
+
+class _BatchApiRequest(MIMEMultipart):
+  """Represents a batch API request, overrides header writing behavior."""
+
+  def _write_headers(self, msg):
+    # We write the headers for the multipart message manually so to that end we
+    # override _write_headers to signal the email library that we implement our
+    # own header logic. (The outer HTTP request supplies the multipart
+    # Content-Type header separately.)
+    pass
+
+
+class _TimeoutChecker(object):
+ """Tracks a time at which the operation will have timed out."""
+ __slots__ = ('_timeout')
+
+ def __init__(self, timeout_secs):
+ """Initializes TimeoutChecker with number of seconds until the timeout.
+
+ The function will calculate the absolute point in the future at which point
+ the timeout occurs.
+
+ Args:
+ timeout_secs: Number of seconds until timeout.
+ """
+ self._timeout = (time.time() + timeout_secs) if timeout_secs >= 0 else -1
+
+ def check_timeout(self):
+ """Returns True if timeout occurred."""
+ return self._timeout > 0 and time.time() > self._timeout
diff --git a/libcloud/common/gcelib/gce_base_test.py b/libcloud/common/gcelib/gce_base_test.py
new file mode 100755
index 0000000..6240925
--- /dev/null
+++ b/libcloud/common/gcelib/gce_base_test.py
@@ -0,0 +1,748 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for gce_base."""
+
+from email.mime.nonmultipart import MIMENonMultipart
+import itertools
+import json
+import logging
+import random
+import re
+import unittest
+
+import httplib2
+from gcelib import gce_base
+
+
+class OperationMock(object):
+  """Stand-in for the generated Operation class used by the parser tests."""
+
+  @staticmethod
+  def load_from_dict(unused_val, unused_gce):
+    # Mimics the generated parser hook: ignores its inputs, returns a mock.
+    return OperationMock()
+
+
+class InstanceMock(object):
+  """Stand-in for the generated Instance class used by the parser tests."""
+
+  @staticmethod
+  def load_from_dict(unused_val, unused_gce):
+    # Mimics the generated parser hook: ignores its inputs, returns a mock.
+    return InstanceMock()
+
+
+def get_mock_parsers():
+  """Returns a dict mapping kinds to mock parser functions."""
+  return {
+      'compute#operation': OperationMock.load_from_dict,
+      'compute#instance': InstanceMock.load_from_dict}
+
+
+class GoogleComputeEngineBaseTests(unittest.TestCase):
+ """Tests for GoogleComputeEngineBase."""
+
+  def setUp(self):
+    """Builds a driver with no credentials against the v1 API base URL."""
+    self.gce = gce_base.GoogleComputeEngineBase(
+        None,
+        logging_level=logging.DEBUG,
+        base_url='https://www.googleapis.com/compute/v1/projects/')
+
+  def tearDown(self):
+    """No per-test cleanup is required."""
+    pass
+
+  def test_normalize(self):
+    """Tests resource normalization."""
+    # A bare resource name gets the project and collection prepended.
+    self.assertEqual(
+        self.gce._normalize('my-project', 'instances', 'my-instance'),
+        'https://www.googleapis.com/compute/v1/projects/my-project/instances/'
+        'my-instance')
+
+    self.assertEqual(
+        self.gce._normalize('my-project', 'instances', 'instances/my-instance'),
+        'https://www.googleapis.com/compute/v1/projects/my-project/instances/'
+        'my-instance')
+
+    self.assertEqual(
+        self.gce._normalize('my-project', 'instances',
+                            'projects/my-project/instances/my-instance'),
+        'https://www.googleapis.com/compute/v1/projects/my-project/instances/'
+        'my-instance')
+
+    self.assertEqual(
+        self.gce._normalize('my-project', 'instances', 'instances'),
+        'https://www.googleapis.com/compute/v1/projects/my-project/instances/'
+        'instances')
+
+    self.assertEqual(
+        self.gce._normalize('projectsprojects', 'instances',
+                            'projects/projectsprojects/instances/my-instance'),
+        'https://www.googleapis.com/compute/v1/projects/projectsprojects/'
+        'instances/my-instance')
+
+    self.assertEqual(
+        self.gce._normalize('my-project', 'images', 'ubuntu-12-04-v20120503'),
+        'https://www.googleapis.com/compute/v1/projects/my-project/images/'
+        'ubuntu-12-04-v20120503')
+
+    # A path qualified with another project resolves against that project.
+    self.assertEqual(
+        self.gce._normalize('my-project', 'images',
+                            'projects/google/images/ubuntu-12-04-v20120503'),
+        'https://www.googleapis.com/compute/v1/projects/google/images/'
+        'ubuntu-12-04-v20120503')
+
+  def test_defaults(self):
+    """Tests the default properties."""
+    # Freshly constructed driver: only network-related defaults are preset.
+    self.assertEqual(self.gce.default_image, None)
+    self.assertEqual(self.gce.default_machine_type, None)
+    self.assertEqual(
+        self.gce.default_network_interface,
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'default'}])
+    self.assertEqual(self.gce.default_network, 'default')
+    self.assertEqual(self.gce.default_project, None)
+    self.assertEqual(self.gce.default_zone, None)
+
+    # Assign explicit defaults through the property setters.
+    self.gce.default_image = (
+        'projects/google/images/ubuntu-12-04-v20120503')
+    self.gce.default_machine_type = 'n1-standard-1-ssd'
+    self.gce.default_network = 'my-network'
+    self.gce.default_network_interface = (
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'my-network-interface'}])
+    self.gce.default_project = 'my-project'
+    self.gce.default_zone = 'us-east-a'
+
+    self.assertEqual(self.gce.default_image,
+                     'projects/google/images/ubuntu-12-04-v20120503')
+    self.assertEqual(self.gce.default_machine_type, 'n1-standard-1-ssd')
+    self.assertEqual(self.gce.default_network, 'my-network')
+    self.assertEqual(
+        self.gce.default_network_interface,
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'my-network-interface'}])
+    self.assertEqual(self.gce.default_project, 'my-project')
+    self.assertEqual(self.gce.default_zone, 'us-east-a')
+
+    # Deleting a default restores its initial value.
+    del self.gce.default_image
+    del self.gce.default_machine_type
+    del self.gce.default_network
+    del self.gce.default_network_interface
+    del self.gce.default_project
+    del self.gce.default_zone
+
+    self.assertEqual(self.gce.default_image, None)
+    self.assertEqual(self.gce.default_machine_type, None)
+    self.assertEqual(self.gce.default_network, 'default')
+    self.assertEqual(
+        self.gce.default_network_interface,
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'default'}])
+    self.assertEqual(self.gce.default_project, None)
+    self.assertEqual(self.gce.default_zone, None)
+
+    # Defaults may also be supplied as constructor arguments.
+    gce = gce_base.GoogleComputeEngineBase(
+        None,
+        default_image='projects/google/images/ubuntu-12-04-v20120503',
+        default_machine_type='n1-standard-1-ssd',
+        default_network='my-network',
+        default_network_interface=(
+            [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT',
+                                 'name': 'External NAT'}],
+              'network': 'my-network-interface'}]),
+        default_project='my-project',
+        default_zone='us-east-a')
+
+    self.assertEqual(gce.default_image,
+                     'projects/google/images/ubuntu-12-04-v20120503')
+    self.assertEqual(gce.default_machine_type, 'n1-standard-1-ssd')
+    self.assertEqual(gce.default_network, 'my-network')
+    self.assertEqual(
+        gce.default_network_interface,
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'my-network-interface'}])
+    self.assertEqual(gce.default_project, 'my-project')
+    self.assertEqual(gce.default_zone, 'us-east-a')
+
+    # default_network alone seeds the default network interface.
+    gce = gce_base.GoogleComputeEngineBase(
+        None,
+        default_network='my-network')
+    self.assertEqual(
+        gce.default_network_interface,
+        [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+          'network': 'my-network'}])
+
+  def test_execute_with_http(self):
+    """Ensures the constructor rejects plain-HTTP (non-https) base URLs."""
+    self.assertRaises(ValueError,
+                      gce_base.GoogleComputeEngineBase,
+                      credentials=None,
+                      base_url='http://www.googleapis.com/compute/v1/projects/')
+
+  def test_base_url_checks(self):
+    """Ensures the constructor validates the base URL's API path."""
+    # A base URL with an unknown API name ('nocompute') must be rejected.
+    self.assertRaises(
+        ValueError,
+        gce_base.GoogleComputeEngineBase,
+        credentials=None,
+        base_url='https://www.googleapis.com/nocompute/v1/projects/')
+
+  def test_execute(self):
+    """Tests _execute()'s ability to build up a correct request URL."""
+
+    def mock_send_request_json(path, method, request_body):
+      # The mock asserts the query parameters are URL-encoded into the path.
+      self.assertEqual(
+          path,
+          'https://www.googleapis.com/compute/v1/projects/my-project/'
+          'instances?filter=name%2Beq%2B%27.%2A%2Fmy_instance_%5B0-9%5D%2B%27'
+          '&pageToken='
+          'CghJTlNUQU5DRRIhNzQyMzg3MDc3NTUuY3JlYXRlZC1qdW4tNi1udW0t'
+          '&maxResults=100')
+      self.assertEqual(method, 'GET')
+      self.assertEqual(request_body, None)
+
+    query_params = {
+        'pageToken': 'CghJTlNUQU5DRRIhNzQyMzg3MDc3NTUuY3JlYXRlZC1qdW4tNi1udW0t',
+        'filter': 'name+eq+\'.*/my_instance_[0-9]+\'',
+        'maxResults': 100}
+
+    self.gce._send_request_json = mock_send_request_json
+    self.gce._execute(gce_base.GoogleComputeEngineBase.API_REQUEST(
+        'GET', 'my-project/instances', query_params, None), False)
+
+  def test_convert(self):
+    """Ensures that _parse() correctly parses dicts and tuples of dicts."""
+
+    def get_empty_parsers():
+      """Returns a dict that contains no kind-to-parser mappings."""
+      return {}
+
+    self.gce._get_parsers = get_mock_parsers
+
+    # None and exceptions pass through; tuples must have exactly two elements.
+    self.assertEqual(self.gce._parse(None), None)
+    self.assertEqual(self.gce._parse((None, None)),
+                     (None, None))
+    self.assertRaises(ValueError, self.gce._parse, (None, None, None))
+    value_error = ValueError()
+    self.assertEqual(self.gce._parse(value_error), value_error)
+
+    operation = {
+        'status': 'DONE',
+        'kind': 'compute#operation',
+        'name': '.../operation-1339021242481-4c1d52d7d64f0-63c9dc72',
+        'startTime': '2012-06-06T22:20:42.601',
+        'insertTime': '2012-06-06T22:20:42.481',
+        'targetId': '12884714477555140369',
+        'targetLink': 'https://googleapis.com/compute/.../instances/x-1000',
+        'operationType': 'insert',
+        'progress': 100,
+        'endTime': '2012-06-06T22:20:49.268',
+        'id': '12907884892091471776',
+        'selfLink': 'https://googleapis.com/compute/...d64f0-63c9dc72',
+        'user': 'bugsbunny@google.com'}
+
+    instance = {
+        'status': 'STAGING',
+        'kind': 'compute#instance',
+        'machineType': 'https://googleapis.com/compute/.../n1-standard-1',
+        'name': 'projects/my-project/instances/x-1000',
+        'zone': 'https://googleapis.com/compute/.../zones/us-east-a',
+        'tags': [],
+        'image': 'https://googleapis.com/compute/.../images/ubuntu',
+        'disks': [
+            {
+                'index': 0,
+                'kind': 'compute#instanceDisk',
+                'type': 'EPHEMERAL',
+                'mode': 'READ_WRITE'
+            }
+        ],
+        'networkInterfaces': [
+            {
+                'networkIP': '10.211.197.175',
+                'kind': 'compute#instanceNetworkInterface',
+                'accessConfigs': [
+                    {
+                        'type': 'ONE_TO_ONE_NAT',
+                        'name': 'External NAT',
+                        'natIP': '173.255.120.98'
+                    }
+                ],
+                'name': 'nic0',
+                'network': 'https://googleapis.com/compute/.../networks/default'
+            }
+        ],
+        'id': '12884714477555140369',
+        'selfLink': 'https://googleapis.com/compute/.../instances/x-1000',
+        'description': ''}
+
+    self.assertRaises(ValueError, self.gce._parse, (operation, instance, None))
+
+    # The 'kind' key selects the registered mock parser.
+    res = self.gce._parse(operation)
+    self.assertTrue(isinstance(res, OperationMock))
+    res = self.gce._parse(instance)
+    self.assertTrue(isinstance(res, InstanceMock))
+
+    res = self.gce._parse((operation, None))
+    self.assertTrue(isinstance(res[0], OperationMock))
+    self.assertEqual(res[1], None)
+
+    res = self.gce._parse((operation, instance))
+    self.assertTrue(isinstance(res[0], OperationMock))
+    self.assertTrue(isinstance(res[1], InstanceMock))
+
+    # Missing 'kind' and unregistered kinds both raise KeyError.
+    del operation['kind']
+    self.assertRaises(KeyError, self.gce._parse, operation)
+
+    self.gce._get_parsers = get_empty_parsers
+    self.assertRaises(KeyError, self.gce._parse, instance)
+
+  def test_project_from_self_link(self):
+    """Tests _parse_project() extraction of the project from selfLink URLs."""
+    parse = gce_base.GoogleComputeEngineBase._parse_project
+
+    self.assertEqual(
+        parse('http://googleapis.com/compute/v1/projects/my-project'),
+        'my-project')
+    self.assertEqual(
+        parse('https://googleapis.com/compute/v1beta11/projects/my-project/'),
+        'my-project')
+    self.assertEqual(
+        parse('https://googleapis.com/compute/v1beta11/projects'
+              '/my-project/instances/foo'),
+        'my-project')
+    # Malformed scheme, version, or path segments must yield None.
+    self.assertEqual(
+        parse('//googleapis.com/compute/v1beta11/projects/my-project/xxxx'),
+        None)
+    self.assertEqual(
+        parse('http://googleapis.com/invalid/version/projects/my-project'),
+        None)
+    self.assertEqual(
+        parse('http://googleapis.com/compute/version/noprojects/my-project'),
+        None)
+    self.assertEqual(
+        parse('https://googleapis.com/compute/version/projects/'),
+        None)
+
+  def test_check_url(self):
+    """Ensures that _check_url() raises an exception on bad API URLs."""
+    check = gce_base.GoogleComputeEngineBase._check_url
+    # Success cases.
+    check('https://www.googleapis.com/compute/v1/projects/')
+    check('https://www.googleapis.com/compute/v1beta12/projects/')
+
+    # Failure cases: scheme must be https, host must be www.googleapis.com,
+    # and the /compute/<version>/projects/ path must be well-formed.
+    self.assertRaises(ValueError, check, '')
+    self.assertRaises(ValueError, check,
+                      'http://www.googleapis.com/compute/v1/projects/')
+    self.assertRaises(ValueError, check,
+                      'https://googleapis.com/compute/v1/projects/')
+    self.assertRaises(ValueError, check,
+                      'https://www.gmail.com/compute/v1/projects/')
+    self.assertRaises(ValueError, check,
+                      'http://www.googleapis.com/compute//projects/')
+    self.assertRaises(ValueError, check,
+                      'http://www.googleapis.com/compute/BAD_VERSION/projects/')
+    self.assertRaises(ValueError, check,
+                      'http://www.googleapis.com/compute/v1/')
+    self.assertRaises(ValueError, check,
+                      'http://www.googleapis.com/compute/v1/projects')
+    self.assertRaises(ValueError, check,
+                      'www.googleapis.com/compute/v1/projects')
+    self.assertRaises(ValueError, check,
+                      'https://www.googleapis.com/v1/projects')
+
+  def test_create_url_query(self):
+    """Tests the url query component creation."""
+    self.assertEqual('', self.gce._create_url_query(None))
+    self.assertEqual('', self.gce._create_url_query({}))
+    self.assertEqual('key=value', self.gce._create_url_query({'key': 'value'}))
+    # NOTE(review): this assertion relies on dict iteration order for the
+    # two-key case — potentially fragile; consider sorting keys. TODO confirm.
+    self.assertEqual('a=hello&b=hi',
+                     self.gce._create_url_query({'a': 'hello', 'b': 'hi'}))
+
+  def test_parse_list(self):
+    """Checks _parse_list() parses, passes errors through, wraps failures."""
+    resources = [
+        {'kind': 'compute#operation'},
+        {'kind': 'compute#instance'},
+        gce_base.GoogleComputeEngineError(400, '', ''),
+        {'kind': 'compute#unknown'},
+        {},
+    ]
+    self.gce._get_parsers = get_mock_parsers
+    results = self.gce._parse_list(resources)
+    self.assertTrue(isinstance(results[0], OperationMock))
+    self.assertTrue(isinstance(results[1], InstanceMock))
+    # Existing error objects are passed through by identity.
+    self.assertTrue(results[2] is resources[2])
+    # Unknown kind and missing kind are wrapped into new errors.
+    self.assertTrue(isinstance(results[3], gce_base.GoogleComputeEngineError))
+    self.assertTrue(isinstance(results[4], gce_base.GoogleComputeEngineError))
+
+  def test_serialize_batch_api_reqeuest(self):
+    """Tests MIME-part serialization of individual batch API requests."""
+    # TODO: rename to test_serialize_batch_api_request ('reqeuest' typo).
+    base_path = '/compute/v1beta12/projects'
+    request = gce_base.GoogleComputeEngineBase.API_REQUEST(
+        'DELETE', 'my-project/instances/my-instance', None, None)
+    self.assertEqual(
+        'DELETE /compute/v1beta12/projects/my-project/instances/my-instance\n',
+        self.gce._serialize_batch_api_request(base_path, request))
+
+    # A JSON body adds Content-Type/Content-Length headers and the payload.
+    request = gce_base.GoogleComputeEngineBase.API_REQUEST(
+        'POST', 'my-project/instances', None, '{"kind": "compute#instance"}')
+    self.assertEqual(
+        'POST /compute/v1beta12/projects/my-project/instances\n'
+        'Content-Type: application/json\n'
+        'Content-Length: 28\n\n'
+        '{"kind": "compute#instance"}',
+        self.gce._serialize_batch_api_request(base_path, request))
+
+    # Full path - used by the wait_for_list functionality.
+    request = gce_base.GoogleComputeEngineBase.API_REQUEST(
+        'GET',
+        'https://www.googleapis.com/compute/v1beta12/projects/my-project/'
+        'operations/operation-1234567890', None, None)
+    self.assertEqual(
+        'GET /compute/v1beta12/projects/my-project/operations/'
+        'operation-1234567890\n',
+        self.gce._serialize_batch_api_request(base_path, request))
+
+  def test_parse_batch_api_response(self):
+    """Parses one serialized HTTP response part into a BATCH_RESPONSE."""
+    response = (
+        'HTTP/1.1 200 OK\n'
+        'Content-Type: application/json; charset=UTF-8\n'
+        'Content-Length: 51\n\n'
+        '{"kind": "compute#instance", "name": "my-instance"}')
+    parsed = self.gce._parse_batch_api_response(response)
+
+    self.assertTrue(
+        isinstance(parsed, gce_base.GoogleComputeEngineBase.BATCH_RESPONSE))
+    self.assertEqual(200, parsed.response.status)
+    self.assertEqual('OK', parsed.response.reason)
+    self.assertEqual(
+        '{"kind": "compute#instance", "name": "my-instance"}',
+        parsed.body)
+
+  def test_send_batch_request_too_many(self):
+    """Sending more than MAX_BATCH_SIZE requests must raise ValueError."""
+    request = gce_base.GoogleComputeEngineBase.API_REQUEST(
+        'GET', 'my-project/instances/my-instance', None, None)
+    self.assertRaises(ValueError,
+                      self.gce._send_batch_request,
+                      ([request] * (gce_base.MAX_BATCH_SIZE + 1)))
+
+  def test_send_batch_request(self):
+    """End-to-end batch round trip against a mocked transport layer."""
+    def parse_headers(headers):
+      # Turns 'Name: value' lines into a dict.
+      header_dict = {}
+      for header in headers.split('\n'):
+        if not header: continue
+        name, value = header.split(': ', 1)
+        header_dict[name] = value
+      return header_dict
+
+    def mock_send_request(path, method, body, content_type):
+      # Validates the outgoing multipart request and fabricates a
+      # matching multipart response, one part per inner GET.
+      self.assertEqual('POST', method)
+      self.assertEqual('https://www.googleapis.com/batch', path)
+      match = re.match('multipart/mixed; boundary="([^\"]+)"', content_type)
+      self.assertTrue(match)
+      parts = body.split('--{0}'.format(match.group(1)))
+      self.assertEqual(gce_base.MAX_BATCH_SIZE + 2, len(parts))
+      self.assertEqual('', parts[0])
+      self.assertEqual('--', parts[-1])
+      parts = parts[1:-1]
+
+      responses = []
+      for part in parts:
+        headers, payload = part.split('\n\n', 1)
+        headers = parse_headers(headers)
+        self.assertEqual('application/http', headers['Content-Type'])
+        content_id = headers['Content-ID']
+        self.assertTrue(content_id.startswith('<') and content_id.endswith('>'))
+        content_id = content_id[1:-1]
+
+        http_headers = payload.split('\n\n', 1)[0]
+        split = http_headers.split('\n', 1) # Try to split off the http command
+        http_request = split[0]
+        if len(split) > 1:
+          headers = parse_headers(split[1])
+
+        verb, path = http_request.split(' ')
+        self.assertEqual('GET', verb)
+
+        name = re.match('.*/([^/]+)', path).group(1)
+        payload = '{{ "kind": "compute#instance", "name": "{0}" }}'.format(name)
+
+        msg = MIMENonMultipart('application', 'http')
+        msg.add_header('Content-ID', '<response-{0}>'.format(content_id))
+        msg.set_payload(
+            'HTTP/1.1 200 OK\n'
+            'Content-Type: application/json; charset=UTF-8\n'
+            'Content-Length: {0}\n\n'
+            '{1}'.format(len(payload), payload))
+        responses.append(msg)
+
+      # Shuffle to prove responses are re-ordered by Content-ID, not by
+      # arrival order.
+      random.shuffle(responses)
+      response = gce_base._BatchApiRequest()
+      for r in responses:
+        response.attach(r)
+
+      response_string = response.as_string()
+      boundary = response.get_boundary()
+      response = httplib2.Response({
+          'content-type': 'multipart/mixed; boundary="{0}"'.format(boundary),
+          'status': 200,
+          'reason': 'OK'})
+
+      return response, response_string
+
+    self.gce._send_request = mock_send_request
+    requests = [
+        gce_base.GoogleComputeEngineBase.API_REQUEST(
+            'GET', 'my-project/instances/my-instance-{0}'.format(i), None, None)
+        for i in xrange(gce_base.MAX_BATCH_SIZE)]
+
+    responses = self.gce._send_batch_request(requests)
+    self.assertEqual(gce_base.MAX_BATCH_SIZE, len(responses))
+
+    for i, response in enumerate(responses):
+      self.assertTrue(isinstance(
+          response, gce_base.GoogleComputeEngineBase.BATCH_RESPONSE))
+      self.assertEqual(200, response.response.status)
+      self.assertEqual('OK', response.response.reason)
+
+      instance = json.loads(response.body)
+      self.assertEqual('my-instance-{0}'.format(i), instance['name'])
+      self.assertEqual('compute#instance', instance['kind'])
+
+ def test_execute_batch_request_big(self):
+ def mock_send_batch_request(batch):
+ self.assertTrue(len(batch) <= gce_base.MAX_BATCH_SIZE)
+ responses = []
+ for request in batch:
+ response_body = '{{"kind": "compute#instance", "name": "{0}"}}'.format(
+ re.match('.*/([^/]+)', request.url).group(1))
+ responses.append(
+ gce_base.GoogleComputeEngineBase.BATCH_RESPONSE(
+ httplib2.Response({
+ 'content-type': 'application/json',
+ 'status': 200,
+ 'reason': 'OK'}),
+ response_body))
+ return responses
+
+ self.gce._send_batch_request = mock_send_batch_request
+
+ requests_count = int(2.5 * gce_base.MAX_BATCH_SIZE)
+ requests = [
+ gce_base.GoogleComputeEngineBase.API_REQUEST(
+ 'GET', 'my-project/instances/my-instance-{0}'.format(i), None, None)
+ for i in xrange(requests_count)]
+
+ responses = self.gce._execute_batch_request(requests)
+ self.assertEqual(requests_count, len(responses))
+ for i, response in enumerate(responses):
+ instance = json.loads(response.body)
+ self.assertTrue('my-instance-{0}'.format(i), instance['name'])
+
+ def test_execute_batch_request_auth(self):
+ retry_count = [0] # Mutable closure
+
+ class MockCredentials(object):
+ def __init__(self):
+ self.refresh_called = 0
+
+ def refresh(self, unused_http):
+ self.refresh_called += 1
+
+ def mock_send_batch_request(batch):
+ def validate_pass(unused_i):
+ return True
+
+ def validate_fail(i):
+ # Fail every other request
+ return (i % 2) == 0
+
+ if retry_count[0] == 0:
+ validate = validate_fail
+ elif retry_count[0] == 1:
+ validate = validate_pass
+ else:
+ self.fail('Should not retry more than 1x')
+
+ retry_count[0] += 1
+
+ # Process requests, validating credentials.
+ responses = []
+ for i, request in enumerate(batch):
+ if validate(i):
+ http_body = '{{"kind": "compute#instance", "name": "{0}"}}'.format(
+ re.match('.*/([^/]+)', request.url).group(1))
+ http_response = httplib2.Response({
+ 'content-type': 'application/json',
+ 'status': 200,
+ 'reason': 'OK'})
+ else:
+ http_body = '{"error": "Invalid credentials"}'
+ http_response = httplib2.Response({
+ 'content-type': 'application/json',
+ 'status': 401,
+ 'reason': 'Invalid Credentials'
+ })
+
+ responses.append(gce_base.GoogleComputeEngineBase.BATCH_RESPONSE(
+ http_response, http_body))
+
+ return responses
+
+ credentials = MockCredentials()
+ gce = gce_base.GoogleComputeEngineBase(
+ credentials,
+ base_url='https://www.googleapis.com/compute/v1/projects/')
+
+ gce._send_batch_request = mock_send_batch_request
+
+ requests_count = 100
+ requests = [
+ gce_base.GoogleComputeEngineBase.API_REQUEST(
+ 'GET', 'my-project/instances/my-instance-{0}'.format(i), None, None)
+ for i in xrange(requests_count)]
+
+ responses = gce._execute_batch_request(requests)
+ self.assertEqual(requests_count, len(responses))
+ for i, response in enumerate(responses):
+ instance = json.loads(response.body)
+ self.assertTrue('my-instance-{0}'.format(i), instance['name'])
+
+ self.assertEqual(1, credentials.refresh_called)
+
+ def test_wait_for_list(self):
+ def mock_sleep(unused_seconds):
+ pass
+
+ def mock_execute_batch_request(requests):
+ def complete(i):
+ return i % 2 == 0 # Half of operations complete every turn.
+
+ responses = []
+ for i, request in enumerate(requests):
+ self.assertFalse(request.body)
+
+ status = 200
+ reason = 'OK'
+ body = ''
+
+ if complete(i):
+ info = re.match('.*/([^/]+)', request.url).group(1)
+ if info == 'valid':
+ body = {
+ 'kind': 'compute#operation',
+ 'status': 'DONE',
+ 'targetLink': ('https://www.googleapis.com/compute/v1/'
+ 'projects/my-p/instances/instance')
+ }
+ elif info == 'error':
+ body = {
+ 'kind': 'compute#operation',
+ 'status': 'DONE',
+ 'error': 'Error',
+ 'httpErrorStatusCode': 404,
+ 'httpErrorMessage': 'Resource was not found.'
+ }
+ elif info == 'httperror':
+ status = 404
+ reason = 'Not Found'
+ body = {'error': 'Not Found'}
+ elif info == 'nobody':
+ pass
+ elif info == 'badjson':
+ body = 'this is not json'
+ elif info == 'noselflink':
+ body = {'kind': 'compute#operation'}
+ elif info == 'nokind':
+ body = {}
+ elif info == 'instance':
+ body = {'kind': 'compute#instance', 'name': 'foo'}
+ else:
+ self.fail('Unexpected info {0}'.format(info))
+ else:
+ body = {
+ 'kind': 'compute#operation',
+ 'selfLink': '{0}'.format(request.url)
+ }
+
+ if isinstance(body, dict):
+ body = json.dumps(body)
+
+ response = httplib2.Response({'status': status})
+ response.reason = reason
+ responses.append(
+ gce_base.GoogleComputeEngineBase.BATCH_RESPONSE(response, body))
+
+ return responses
+
+ # Only the selfLink is sent to the server so it encodes all states.
+ operations = [
+ {'selfLink': 'nokind'},
+ {'kind': 'compute#operation', 'selfLink': 'valid'},
+ {'kind': 'compute#operation', 'selfLink': 'error'},
+ {'kind': 'compute#operation', 'selfLink': 'httperror'},
+ {'kind': 'compute#operation', 'selfLink': 'nobody'},
+ {'kind': 'compute#operation', 'selfLink': 'badjson'},
+ {'kind': 'compute#operation', 'selfLink': 'noselflink'},
+ {'kind': 'compute#operation', 'selfLink': 'nokind'},
+ ]
+
+ # Complete selfLinks to be URLs
+ for o in operations:
+ o['selfLink'] = ('https://www.googleapis.com/compute/v1/projects/my-p/'
+ 'operations/' + o['selfLink'])
+ operations = 100 * operations
+
+ self.gce._execute_batch_request = mock_execute_batch_request
+ self.gce._sleep = mock_sleep
+
+ responses = self.gce._wait_for_list(operations)
+ self.assertEqual(len(operations), len(responses))
+
+ for operation, response in itertools.izip(operations, responses):
+ info = re.match('.*/([^/]+)', operation['selfLink']).group(1)
+
+ if info == 'valid':
+ self.assertTrue(isinstance(response, dict))
+ self.assertEqual('compute#instance', response['kind'])
+ elif info == 'error':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(404, response.status)
+ self.assertEqual('Resource was not found.', response.reason)
+ elif info == 'httperror':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(404, response.status)
+ self.assertEqual('Not Found', response.reason)
+ elif info == 'nobody':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(400, response.status)
+ self.assertEqual('Server returned an empty resource as a response.',
+ response.reason)
+ elif info == 'badjson':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(400, response.status)
+ self.assertEqual('Server returned invalid JSON response.',
+ response.reason)
+ elif info == 'noselflink':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(400, response.status)
+ self.assertEqual('Operation is missing selfLink property.',
+ response.reason)
+ elif info == 'nokind':
+ self.assertTrue(isinstance(response, gce_base.GoogleComputeEngineError))
+ self.assertEqual(400, response.status)
+ self.assertEqual('Server returned invalid resource.', response.reason)
+ else:
+ self.fail('Unexpected info {0}'.format(info))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libcloud/common/gcelib/gce_util.py b/libcloud/common/gcelib/gce_util.py
new file mode 100644
index 0000000..736cb48
--- /dev/null
+++ b/libcloud/common/gcelib/gce_util.py
@@ -0,0 +1,187 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Simple utilities for use with the Google Compute Engine library."""
+
+import collections
+import ConfigParser
+import datetime
+import json
+import os
+import urllib
+
+
+import oauth2client.client
+import oauth2client.file
+import oauth2client.tools
+import gflags
+
+gflags.FLAGS.auth_local_webserver = False
+
+# Credential and configuration files.
+GCE_CREDENTIALS_FILE = '~/.gce.credentials'
+DEFAULT_GCE_CONFIG_FILE = '~/.gce.config'
+
+# Config file section name.
+GCE_CONFIG_SECTION = 'gce_config'
+
+
+GceDefaults = collections.namedtuple(
+ 'GceDefaults', ('project', 'image', 'machine_type', 'network', 'zone'))
+
+
+def get_credentials():
+ """Returns OAuth2 credentials for use with Google Compute Engine Api.
+
+ Loads the credentials from the credentials file. If the credentials are
+ missing or invalid, performs the OAuth2 authentication flow.
+
+ Returns:
+ oauth2client credentials object to use with the GoogleComputeEngine Api.
+ """
+ storage = oauth2client.file.Storage(os.path.expanduser(GCE_CREDENTIALS_FILE))
+ credentials = storage.get()
+
+
+ if credentials is None or credentials.invalid:
+ flow = oauth2client.client.OAuth2WebServerFlow(
+ client_id='1025389682001.apps.googleusercontent.com',
+ client_secret='xslsVXhA7C8aOfSfb6edB6p6',
+ scope='https://www.googleapis.com/auth/compute',
+ user_agent='google-compute-engine-demo/0.1')
+ credentials = oauth2client.tools.run(flow, storage)
+ return credentials
+
+
+class ServiceAccountCredentials(oauth2client.client.OAuth2Credentials):
+ """Credentials object that uses service account scopes inside an instance."""
+
+ def __init__(self, scopes='https://www.googleapis.com/auth/compute'):
+ self.scopes = scopes
+ access_token, token_expiry = self._internal_refresh()
+ oauth2client.client.OAuth2Credentials.__init__(self, access_token, None,
+ None, None, token_expiry,
+ None, None)
+
+ def _refresh(self, _):
+ self.access_token, self.token_expiry = self._internal_refresh()
+
+ def _internal_refresh(self):
+ url = ('http://metadata/0.1/meta-data/service-accounts/default/'
+ 'acquire?scopes=' + self.scopes)
+ data = json.loads(urllib.urlopen(url).read())
+ return (data['accessToken'],
+ datetime.datetime.utcfromtimestamp(data['expiresAt']))
+
+
+def get_defaults(config_file=DEFAULT_GCE_CONFIG_FILE):
+ """Loads the default values to use with the GoogleComputeEngine API.
+
+ The default values are loaded from the configuration file.
+
+ Args:
+ config_file: The path to the configuration file.
+
+ Returns:
+ The GceDefaults named tuple with the default values.
+ """
+
+ def get_option(cfg, option, default=None):
+ if cfg.has_option(GCE_CONFIG_SECTION, option):
+ return cfg.get(GCE_CONFIG_SECTION, option)
+ return default
+
+ config = ConfigParser.RawConfigParser()
+ config.read(os.path.expanduser(config_file))
+ return GceDefaults(
+ get_option(config, 'project'),
+ get_option(config, 'image'),
+ get_option(config, 'machine_type'),
+ get_option(config, 'network', 'default'),
+ get_option(config, 'zone'))
+
+
+def build_config(api):
+ """Creates a new configuration file.
+
+ The process is interactive: it requires input from the user from standard in.
+
+ Args:
+ api: An instance of GoogleComputeEngine that can be used to query images,
+ zones, the user's projects, etc...
+
+ Returns:
+ The location to which the configuration file should be saved.
+ """
+
+ def choose(iterable, name):
+ """Prompts the user to choose an item from the elements of the iterable."""
+ items = sorted(iterable, key=lambda i: i.name)
+ choices = [str(i) for i in range(1, len(items) + 1)]
+ i = 1
+ for item in items:
+ print '{0}: {1}'.format(i, item.name)
+ i += 1
+ while True:
+ choice = raw_input('Which of the {0} above do you want as your default? '
+ .format(name))
+ if choice in choices:
+ break
+ else:
+ print 'Oops! Please select one of [{0}].'.format(' '.join(choices))
+ return items[int(choice) - 1].name
+
+ print 'I\'m going to help you create a new configuration file.'
+ print 'If you get tired of me, hit EOF and I will go away nicely.'
+
+ try:
+ config_file = raw_input(
+ 'Choose a location for the config file (enter nothing for {0}): '
+ .format(DEFAULT_GCE_CONFIG_FILE))
+ config_file = config_file or DEFAULT_GCE_CONFIG_FILE
+
+ while True:
+ project = raw_input('What is the name of your project? ')
+ try:
+ api.get_project(project=project)
+ break
+ except ValueError:
+ print 'Oops! I couldn\'t find your project. Try again!'
+
+ image = choose(api.all_images(project='google'), 'images')
+ machine_type = choose(api.all_machine_types(project=project),
+ 'machine types')
+ zone = choose(api.all_zones(project=project), 'zones')
+ except EOFError:
+ print
+ print ('Perhaps we can try creating your configuration file some other '
+ 'time.')
+ return None
+
+ # Writes the configuration to the config file.
+ config = ConfigParser.RawConfigParser()
+ config.add_section(GCE_CONFIG_SECTION)
+ config.set(GCE_CONFIG_SECTION, 'project', project)
+ config.set(GCE_CONFIG_SECTION, 'image', 'projects/google/images/' + image)
+ config.set(GCE_CONFIG_SECTION, 'machine_type', machine_type)
+ config.set(GCE_CONFIG_SECTION, 'zone', zone)
+ with open(os.path.expanduser(config_file), 'w') as f:
+ config.write(f)
+
+ print 'Your configuration has been saved to {0}.'.format(config_file)
+ print 'Feel free to edit the file or re-run config() in the future.'
+
+ return config_file
diff --git a/libcloud/common/gcelib/gce_v1beta12.py b/libcloud/common/gcelib/gce_v1beta12.py
new file mode 100644
index 0000000..3ff43de
--- /dev/null
+++ b/libcloud/common/gcelib/gce_v1beta12.py
@@ -0,0 +1,5893 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generated code for the Compute Engine API.
+
+This generated code is for version v1beta12 of the API.
+
+Documentation can be found at:
+ https://developers.google.com/compute/docs/reference/v1beta12
+"""
+
+import gce_base
+import json
+
+
+class AccessConfig(object):
+ """Generated class AccessConfig."""
+ __slots__ = ['name', 'natIP']
+
+ def __init__(self, name=None, natIP=None):
+ self.name = name
+ self.natIP = natIP
+
+ def __str__(self):
+ return '<AccessConfig instance at {0:#x}:\n{1}>'.format(
+ id(self), json.dumps(AccessConfig.to_json(self), indent=2))
+
+ __repr__ = __str__
+
+ @staticmethod
+ def from_json(resource, gce=None):
+ if resource is None:
+ return None
+ kind = resource.get('kind')
+ if kind != 'compute#accessConfig':
+ raise ValueError('Cannot load AccessConfig from {0}.'.format(kind))
+
+ return AccessConfig(
+ resource.get('name'),
+ resource.get('natIP'))
+
+ @staticmethod
+ def to_json(value):
+ result = {
+ 'type': 'ONE_TO_ONE_NAT',
+ 'kind': 'compute#accessConfig'
+ }
+ if isinstance(value, AccessConfig):
+ if value.name is not None:
+ result['name'] = value.name
+ if value.natIP is not None:
+ result['natIP'] = value.natIP
+ elif isinstance(value, dict):
+ temp = value.get('name')
+ if temp is not None:
+ result['name'] = temp
+ temp = value.get('natIP')
+ if temp is not None:
+ result['natIP'] = temp
+ else:
+ raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+ return result
+
+ @staticmethod
+ def array_from_json(input, gce=None):
+ if input is not None:
+ return [AccessConfig.from_json(item, gce) for item in input]
+
+ @staticmethod
+ def array_to_json(value):
+ if value is None:
+ return []
+ elif isinstance(value, list):
+ return [AccessConfig.to_json(item) for item in value]
+ elif isinstance(value, dict) or isinstance(value, AccessConfig):
+ return [AccessConfig.to_json(value)]
+
+
+class AttachedDisk(object):
+ """Generated class AttachedDisk."""
+ __slots__ = ['type', 'deleteOnTerminate', 'deviceName', 'index', 'mode', 'source']
+
+ def __init__(self, type=None, deleteOnTerminate=None, deviceName=None, index=None, mode=None, source=None):
+ self.type = type
+ self.deleteOnTerminate = deleteOnTerminate
+ self.deviceName = deviceName
+ self.index = index
+ self.mode = mode
+ self.source = source
+
+ def __str__(self):
+ return '<AttachedDisk instance at {0:#x}:\n{1}>'.format(
+ id(self), json.dumps(AttachedDisk.to_json(self), indent=2))
+
+ __repr__ = __str__
+
+ @staticmethod
+ def from_json(resource, gce=None):
+ if resource is None:
+ return None
+ kind = resource.get('kind')
+ if kind != 'compute#attachedDisk':
+ raise ValueError('Cannot load AttachedDisk from {0}.'.format(kind))
+
+ return AttachedDisk(
+ resource.get('type'),
+ resource.get('deleteOnTerminate'),
+ resource.get('deviceName'),
+ resource.get('index'),
+ resource.get('mode'),
+ resource.get('source'))
+
+ @staticmethod
+ def to_json(value, gce=None, project=None):
+ result = {'kind': 'compute#attachedDisk'}
+ if isinstance(value, AttachedDisk):
+ if value.type is not None:
+ result['type'] = value.type
+ if value.deleteOnTerminate is not None:
+ result['deleteOnTerminate'] = value.deleteOnTerminate
+ if value.deviceName is not None:
+ result['deviceName'] = value.deviceName
+ if value.index is not None:
+ result['index'] = value.index
+ if value.mode is not None:
+ result['mode'] = value.mode
+ if value.source is not None:
+ result['source'] = (gce._normalize(project, 'disks', value.source) if (gce is not None and project is not None) else value.source)
+ elif isinstance(value, dict):
+ temp = value.get('type')
+ if temp is not None:
+ result['type'] = temp
+ temp = value.get('deleteOnTerminate')
+ if temp is not None:
+ result['deleteOnTerminate'] = temp
+ temp = value.get('deviceName')
+ if temp is not None:
+ result['deviceName'] = temp
+ temp = value.get('index')
+ if temp is not None:
+ result['index'] = temp
+ temp = value.get('mode')
+ if temp is not None:
+ result['mode'] = temp
+ temp = value.get('source')
+ if temp is not None:
+ result['source'] = (gce._normalize(project, 'disks', temp) if (gce is not None and project is not None) else temp)
+ else:
+ raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+ return result
+
+ @staticmethod
+ def array_from_json(input, gce=None):
+ if input is not None:
+ return [AttachedDisk.from_json(item, gce) for item in input]
+
+ @staticmethod
+ def array_to_json(value, gce, project):
+ if value is None:
+ return []
+ elif isinstance(value, list):
+ return [AttachedDisk.to_json(item, gce, project) for item in value]
+ elif isinstance(value, dict) or isinstance(value, AttachedDisk):
+ return [AttachedDisk.to_json(value, gce, project)]
+
+
+class Disk(object):
+ """Generated class Disk."""
+ __slots__ = ['sizeGb', 'zone', 'description', 'name', 'sourceSnapshot', 'sourceSnapshotId', 'status', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']
+
+ def __init__(self, sizeGb=None, zone=None, description=None, name=None, sourceSnapshot=None, sourceSnapshotId=None, status=None):
+ self.__gce = None
+ self.__project = None
+ self.sizeGb = sizeGb
+ self.zone = zone
+ self.description = description
+ self.name = name
+ self.sourceSnapshot = sourceSnapshot
+ self.sourceSnapshotId = sourceSnapshotId
+ self.status = status
+ self.creationTimestamp = None
+ self.id = None
+ self.selfLink = None
+
+ def __str__(self):
+ return '<Disk instance at {0:#x}:\n{1}>'.format(
+ id(self), json.dumps(Disk.to_json(self), indent=2))
+
+ __repr__ = __str__
+
+ @staticmethod
+ def from_json(resource, gce=None):
+ if resource is None:
+ return None
+ kind = resource.get('kind')
+ if kind != 'compute#disk':
+ raise ValueError('Cannot load Disk from {0}.'.format(kind))
+
+ result = Disk(
+ resource.get('sizeGb'),
+ resource.get('zone'),
+ resource.get('description'),
+ resource.get('name'),
+ resource.get('sourceSnapshot'),
+ resource.get('sourceSnapshotId'),
+ resource.get('status'))
+
+ result.creationTimestamp = resource.get('creationTimestamp')
+ result.id = resource.get('id')
+ self_link = resource.get('selfLink')
+ result.selfLink = self_link
+ result.__gce = gce
+ result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+ return result
+
+ @staticmethod
+ def to_json(value, gce=None, project=None):
+ result = {'kind': 'compute#disk'}
+ if isinstance(value, Disk):
+ if value.sizeGb is not None:
+ result['sizeGb'] = value.sizeGb
+ if value.zone is not None:
+ result['zone'] = (gce._normalize(project, 'zones', value.zone) if (gce is not None and project is not None) else value.zone)
+ if value.description is not None:
+ result['description'] = value.description
+ if value.name is not None:
+ result['name'] = value.name
+ if value.sourceSnapshot is not None:
+ result['sourceSnapshot'] = value.sourceSnapshot
+ if value.sourceSnapshotId is not None:
+ result['sourceSnapshotId'] = value.sourceSnapshotId
+ if value.status is not None:
+ result['status'] = value.status
+ if value.creationTimestamp is not None:
+ result['creationTimestamp'] = value.creationTimestamp
+ if value.id is not None:
+ result['id'] = value.id
+ if value.selfLink is not None:
+ result['selfLink'] = value.selfLink
+ elif isinstance(value, dict):
+ temp = value.get('sizeGb')
+ if temp is not None:
+ result['sizeGb'] = temp
+ temp = value.get('zone')
+ if temp is not None:
+ result['zone'] = (gce._normalize(project, 'zones', temp) if (gce is not None and project is not None) else temp)
+ temp = value.get('description')
+ if temp is not None:
+ result['description'] = temp
+ temp = value.get('name')
+ if temp is not None:
+ result['name'] = temp
+ temp = value.get('sourceSnapshot')
+ if temp is not None:
+ result['sourceSnapshot'] = temp
+ temp = value.get('sourceSnapshotId')
+ if temp is not None:
+ result['sourceSnapshotId'] = temp
+ temp = value.get('status')
+ if temp is not None:
+ result['status'] = temp
+ temp = value.get('creationTimestamp')
+ if temp is not None:
+ result['creationTimestamp'] = temp
+ temp = value.get('id')
+ if temp is not None:
+ result['id'] = temp
+ temp = value.get('selfLink')
+ if temp is not None:
+ result['selfLink'] = temp
+ else:
+ raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+ return result
+
+ @staticmethod
+ def array_from_json(input, gce=None):
+ if input is not None:
+ return [Disk.from_json(item, gce) for item in input]
+
+ @staticmethod
+ def array_to_json(value, gce, project):
+ if value is None:
+ return []
+ elif isinstance(value, list):
+ return [Disk.to_json(item, gce, project) for item in value]
+ elif isinstance(value, dict) or isinstance(value, Disk):
+ return [Disk.to_json(value, gce, project)]
+
+ def delete(self, project=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.delete_disk(self, project, blocking)
+
+ def get(self, project=None, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.get_disk(self, project)
+
+ def insert(self, sizeGb=None, zone=None, description=None, name=None, project=None, sourceSnapshot=None, sourceSnapshotId=None, status=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.insert_disk(self, sizeGb, zone, description, name, project, sourceSnapshot, sourceSnapshotId, status, blocking)
+
+
+class DiskList(gce_base.ListObjectBase):
+ """Generated class DiskList."""
+ __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']
+
+ def __init__(self, items=None, nextPageToken=None):
+ gce_base.ListObjectBase.__init__(self, items)
+ self.items = items
+ self.nextPageToken = nextPageToken
+ self.id = None
+ self.selfLink = None
+
+ def __str__(self):
+ return '<DiskList instance at {0:#x}:\n{1}>'.format(
+ id(self), json.dumps(DiskList.to_json(self), indent=2))
+
+ __repr__ = __str__
+
+ @staticmethod
+ def from_json(resource, gce=None):
+ if resource is None:
+ return None
+ kind = resource.get('kind')
+ if kind != 'compute#diskList':
+ raise ValueError('Cannot load DiskList from {0}.'.format(kind))
+
+ result = DiskList(
+ Disk.array_from_json(resource.get('items'), gce),
+ resource.get('nextPageToken'))
+
+ result.id = resource.get('id')
+ result.selfLink = resource.get('selfLink')
+ return result
+
+ @staticmethod
+ def to_json(value, gce=None, project=None):
+ result = {'kind': 'compute#diskList'}
+ if isinstance(value, DiskList):
+ if value.items is not None:
+ result['items'] = Disk.array_to_json(value.items, gce, project)
+ if value.nextPageToken is not None:
+ result['nextPageToken'] = value.nextPageToken
+ if value.id is not None:
+ result['id'] = value.id
+ if value.selfLink is not None:
+ result['selfLink'] = value.selfLink
+ elif isinstance(value, dict):
+ temp = value.get('items')
+ if temp is not None:
+ result['items'] = Disk.array_to_json(temp, gce, project)
+ temp = value.get('nextPageToken')
+ if temp is not None:
+ result['nextPageToken'] = temp
+ temp = value.get('id')
+ if temp is not None:
+ result['id'] = temp
+ temp = value.get('selfLink')
+ if temp is not None:
+ result['selfLink'] = temp
+ else:
+ raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+ return result
+
+
+class Firewall(object):
+ """Generated class Firewall."""
+ __slots__ = ['network', 'allowed', 'sourceRanges', 'sourceTags', 'targetTags', 'description', 'name', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']
+
+ def __init__(self, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None):
+ self.__gce = None
+ self.__project = None
+ self.network = network
+ self.allowed = allowed
+ self.sourceRanges = sourceRanges
+ self.sourceTags = sourceTags
+ self.targetTags = targetTags
+ self.description = description
+ self.name = name
+ self.creationTimestamp = None
+ self.id = None
+ self.selfLink = None
+
+ def __str__(self):
+ return '<Firewall instance at {0:#x}:\n{1}>'.format(
+ id(self), json.dumps(Firewall.to_json(self), indent=2))
+
+ __repr__ = __str__
+
+ @staticmethod
+ def from_json(resource, gce=None):
+ if resource is None:
+ return None
+ kind = resource.get('kind')
+ if kind != 'compute#firewall':
+ raise ValueError('Cannot load Firewall from {0}.'.format(kind))
+
+ result = Firewall(
+ resource.get('network'),
+ _Allowed.array_from_json(resource.get('allowed'), gce),
+ gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('sourceRanges')),
+ gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('sourceTags')),
+ gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('targetTags')),
+ resource.get('description'),
+ resource.get('name'))
+
+ result.creationTimestamp = resource.get('creationTimestamp')
+ result.id = resource.get('id')
+ self_link = resource.get('selfLink')
+ result.selfLink = self_link
+ result.__gce = gce
+ result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+ return result
+
+ @staticmethod
+ def to_json(value, gce=None, project=None):
+ result = {'kind': 'compute#firewall'}
+ if isinstance(value, Firewall):
+ if value.network is not None:
+ result['network'] = (gce._normalize(project, 'networks', value.network) if (gce is not None and project is not None) else value.network)
+ if value.allowed is not None:
+ result['allowed'] = _Allowed.array_to_json(value.allowed)
+ if value.sourceRanges is not None:
+ result['sourceRanges'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.sourceRanges)
+ if value.sourceTags is not None:
+ result['sourceTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.sourceTags)
+ if value.targetTags is not None:
+ result['targetTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.targetTags)
+ if value.description is not None:
+ result['description'] = value.description
+ if value.name is not None:
+ result['name'] = value.name
+ if value.creationTimestamp is not None:
+ result['creationTimestamp'] = value.creationTimestamp
+ if value.id is not None:
+ result['id'] = value.id
+ if value.selfLink is not None:
+ result['selfLink'] = value.selfLink
+ elif isinstance(value, dict):
+ temp = value.get('network')
+ if temp is not None:
+ result['network'] = (gce._normalize(project, 'networks', temp) if (gce is not None and project is not None) else temp)
+ temp = value.get('allowed')
+ if temp is not None:
+ result['allowed'] = _Allowed.array_to_json(temp)
+ temp = value.get('sourceRanges')
+ if temp is not None:
+ result['sourceRanges'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+ temp = value.get('sourceTags')
+ if temp is not None:
+ result['sourceTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+ temp = value.get('targetTags')
+ if temp is not None:
+ result['targetTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+ temp = value.get('description')
+ if temp is not None:
+ result['description'] = temp
+ temp = value.get('name')
+ if temp is not None:
+ result['name'] = temp
+ temp = value.get('creationTimestamp')
+ if temp is not None:
+ result['creationTimestamp'] = temp
+ temp = value.get('id')
+ if temp is not None:
+ result['id'] = temp
+ temp = value.get('selfLink')
+ if temp is not None:
+ result['selfLink'] = temp
+ else:
+ raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+ return result
+
+ @staticmethod
+ def array_from_json(input, gce=None):
+ if input is not None:
+ return [Firewall.from_json(item, gce) for item in input]
+
+ @staticmethod
+ def array_to_json(value, gce, project):
+ if value is None:
+ return []
+ elif isinstance(value, list):
+ return [Firewall.to_json(item, gce, project) for item in value]
+ elif isinstance(value, dict) or isinstance(value, Firewall):
+ return [Firewall.to_json(value, gce, project)]
+
+ def delete(self, project=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.delete_firewall(self, project, blocking)
+
+ def get(self, project=None, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.get_firewall(self, project)
+
+ def insert(self, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.insert_firewall(self, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+
+ def patch(self, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.patch_firewall(self, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+
+ def update(self, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True, gce=None):
+ if gce is None: gce = self.__gce
+ if gce is None: raise ValueError('Missing gce')
+ if project is None: project = self.__project
+ if project is None: raise ValueError('Missing project')
+ return gce.update_firewall(self, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+
+
class FirewallList(gce_base.ListObjectBase):
  """Generated class FirewallList.

  One page of Firewall resources (JSON kind 'compute#firewallList'),
  optionally carrying a nextPageToken for fetching further pages.
  """
  __slots__ = ['id', 'items', 'nextPageToken', 'selfLink']

  def __init__(self, id=None, items=None, nextPageToken=None, selfLink=None):
    # NOTE(review): base class presumably provides list-style access over
    # items -- confirm in gce_base.ListObjectBase.
    gce_base.ListObjectBase.__init__(self, items)
    self.id = id
    self.items = items
    self.nextPageToken = nextPageToken
    self.selfLink = selfLink

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<FirewallList instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(FirewallList.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build a FirewallList from a parsed JSON dict.

    Returns None for None input; raises ValueError when resource['kind']
    is not 'compute#firewallList'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#firewallList':
      raise ValueError('Cannot load FirewallList from {0}.'.format(kind))

    return FirewallList(
        resource.get('id'),
        Firewall.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'),
        resource.get('selfLink'))

  @staticmethod
  def to_json(value, gce=None, project=None):
    """Serialize value (FirewallList instance or plain dict) to a JSON dict.

    Fields that are None are omitted from the output; raises TypeError for
    any other input type.
    """
    result = {'kind': 'compute#firewallList'}
    if isinstance(value, FirewallList):
      if value.id is not None:
        result['id'] = value.id
      if value.items is not None:
        result['items'] = Firewall.array_to_json(value.items, gce, project)
      if value.nextPageToken is not None:
        result['nextPageToken'] = value.nextPageToken
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('items')
      if temp is not None:
        result['items'] = Firewall.array_to_json(temp, gce, project)
      temp = value.get('nextPageToken')
      if temp is not None:
        result['nextPageToken'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result
+
+
class Image(object):
  """Generated class Image.

  A machine image resource (JSON kind 'compute#image'). The nested
  'rawDisk' JSON object is flattened into the rawDiskSource /
  rawDiskSha1Checksum attributes and rebuilt on serialization.
  """
  __slots__ = ['rawDiskSource', 'description', 'name', 'rawDiskSha1Checksum', 'preferredKernel', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']

  def __init__(self, rawDiskSource=None, description=None, name=None, rawDiskSha1Checksum=None, preferredKernel=None):
    # Service handle and project are only filled in by from_json.
    self.__gce = None
    self.__project = None
    self.rawDiskSource = rawDiskSource
    self.description = description
    self.name = name
    self.rawDiskSha1Checksum = rawDiskSha1Checksum
    self.preferredKernel = preferredKernel
    # Server-assigned fields; populated by from_json only.
    self.creationTimestamp = None
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<Image instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(Image.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build an Image from a parsed JSON dict.

    Returns None for None input; raises ValueError when resource['kind']
    is not 'compute#image'. Captures gce and the project derived from
    selfLink so instance methods can default them later.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#image':
      raise ValueError('Cannot load Image from {0}.'.format(kind))

    # Flatten the nested rawDisk object into two constructor arguments.
    rawDiskSource = None
    rawDiskSha1Checksum = None
    __temp = resource.get('rawDisk')
    if __temp is not None:
      rawDiskSource = __temp.get('source')
      rawDiskSha1Checksum = __temp.get('sha1Checksum')
    result = Image(
        rawDiskSource,
        resource.get('description'),
        resource.get('name'),
        rawDiskSha1Checksum,
        resource.get('preferredKernel'))

    result.creationTimestamp = resource.get('creationTimestamp')
    result.id = resource.get('id')
    self_link = resource.get('selfLink')
    result.selfLink = self_link
    result.__gce = gce
    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
    return result

  @staticmethod
  def to_json(value, gce=None, project=None):
    """Serialize value (Image instance or plain dict) to a JSON dict.

    None fields are omitted. preferredKernel is normalized via
    gce._normalize(project, 'kernels', ...) only when both gce and project
    are supplied; otherwise passed through unchanged. Raises TypeError for
    any other input type.
    """
    # sourceType/containerType are fixed by this API surface (RAW / TAR).
    result = {
        'sourceType': 'RAW',
        'kind': 'compute#image'
    }
    if isinstance(value, Image):
      # Rebuild the nested rawDisk object from the flattened attributes.
      if value.rawDiskSource is not None or value.rawDiskSha1Checksum is not None:
        __temp = {'containerType': 'TAR'}
        if value.rawDiskSource is not None:
          __temp['source'] = value.rawDiskSource
        if value.rawDiskSha1Checksum is not None:
          __temp['sha1Checksum'] = value.rawDiskSha1Checksum
        result['rawDisk'] = __temp
      if value.description is not None:
        result['description'] = value.description
      if value.name is not None:
        result['name'] = value.name
      if value.preferredKernel is not None:
        result['preferredKernel'] = (gce._normalize(project, 'kernels', value.preferredKernel) if (gce is not None and project is not None) else value.preferredKernel)
      if value.creationTimestamp is not None:
        result['creationTimestamp'] = value.creationTimestamp
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      __temp = value.get('rawDisk')
      if __temp is not None:
        __temp_1 = {'containerType': 'TAR'}
        temp = __temp.get('source')
        if temp is not None:
          __temp_1['source'] = temp
        temp = __temp.get('sha1Checksum')
        if temp is not None:
          __temp_1['sha1Checksum'] = temp
        result['rawDisk'] = __temp_1
      temp = value.get('description')
      if temp is not None:
        result['description'] = temp
      temp = value.get('name')
      if temp is not None:
        result['name'] = temp
      temp = value.get('preferredKernel')
      if temp is not None:
        result['preferredKernel'] = (gce._normalize(project, 'kernels', temp) if (gce is not None and project is not None) else temp)
      temp = value.get('creationTimestamp')
      if temp is not None:
        result['creationTimestamp'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # Returns None (implicitly) when input is None.
    if input is not None:
      return [Image.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value, gce, project):
    # NOTE(review): unlike to_json, unsupported types fall through and
    # return None implicitly rather than raising TypeError.
    if value is None:
      return []
    elif isinstance(value, list):
      return [Image.to_json(item, gce, project) for item in value]
    elif isinstance(value, dict) or isinstance(value, Image):
      return [Image.to_json(value, gce, project)]

  def delete(self, project=None, blocking=True, gce=None):
    """Delete this image; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.delete_image(self, project, blocking)

  def get(self, project=None, gce=None):
    """Fetch this image; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.get_image(self, project)

  def insert(self, rawDiskSource=None, description=None, name=None, rawDiskSha1Checksum=None, preferredKernel=None, project=None, blocking=True, gce=None):
    """Insert this image; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.insert_image(self, rawDiskSource, description, name, rawDiskSha1Checksum, preferredKernel, project, blocking)
+
+
class ImageList(gce_base.ListObjectBase):
  """Generated class ImageList.

  One page of Image resources (JSON kind 'compute#imageList').
  """
  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']

  def __init__(self, items=None, nextPageToken=None):
    # NOTE(review): base class presumably provides list-style access over
    # items -- confirm in gce_base.ListObjectBase.
    gce_base.ListObjectBase.__init__(self, items)
    self.items = items
    self.nextPageToken = nextPageToken
    # Server-assigned fields; populated by from_json only.
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<ImageList instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(ImageList.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build an ImageList from a parsed JSON dict.

    Returns None for None input; raises ValueError on a wrong 'kind'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#imageList':
      raise ValueError('Cannot load ImageList from {0}.'.format(kind))

    result = ImageList(
        Image.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'))

    result.id = resource.get('id')
    result.selfLink = resource.get('selfLink')
    return result

  @staticmethod
  def to_json(value, gce=None, project=None):
    """Serialize value (ImageList instance or plain dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    result = {'kind': 'compute#imageList'}
    if isinstance(value, ImageList):
      if value.items is not None:
        result['items'] = Image.array_to_json(value.items, gce, project)
      if value.nextPageToken is not None:
        result['nextPageToken'] = value.nextPageToken
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('items')
      if temp is not None:
        result['items'] = Image.array_to_json(temp, gce, project)
      temp = value.get('nextPageToken')
      if temp is not None:
        result['nextPageToken'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result
+
+
class Instance(object):
  """Generated class Instance.

  A virtual machine instance resource (JSON kind 'compute#instance').
  """
  __slots__ = ['networkInterfaces', 'metadata', 'disks', 'machineType', 'serviceAccounts', 'tags', 'image', 'zone', 'description', 'name', 'creationTimestamp', 'id', 'selfLink', 'status', 'statusMessage', '__gce', '__project']

  def __init__(self, networkInterfaces=None, metadata=None, disks=None, machineType=None, serviceAccounts=None, tags=None, image=None, zone=None, description=None, name=None):
    # Service handle and project are only filled in by from_json.
    self.__gce = None
    self.__project = None
    self.networkInterfaces = networkInterfaces
    self.metadata = metadata
    self.disks = disks
    self.machineType = machineType
    self.serviceAccounts = serviceAccounts
    self.tags = tags
    self.image = image
    self.zone = zone
    self.description = description
    self.name = name
    # Server-assigned fields; populated by from_json only.
    self.creationTimestamp = None
    self.id = None
    self.selfLink = None
    self.status = None
    self.statusMessage = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<Instance instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(Instance.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build an Instance from a parsed JSON dict.

    Returns None for None input; raises ValueError when resource['kind']
    is not 'compute#instance'. Nested structures are delegated to the
    matching generated classes; gce and the project derived from selfLink
    are captured for instance-method defaults.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#instance':
      raise ValueError('Cannot load Instance from {0}.'.format(kind))

    result = Instance(
        NetworkInterface.array_from_json(resource.get('networkInterfaces'), gce),
        Metadata.from_json(resource.get('metadata'), gce),
        AttachedDisk.array_from_json(resource.get('disks'), gce),
        resource.get('machineType'),
        ServiceAccount.array_from_json(resource.get('serviceAccounts'), gce),
        gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('tags')),
        resource.get('image'),
        resource.get('zone'),
        resource.get('description'),
        resource.get('name'))

    result.creationTimestamp = resource.get('creationTimestamp')
    result.id = resource.get('id')
    self_link = resource.get('selfLink')
    result.selfLink = self_link
    result.status = resource.get('status')
    result.statusMessage = resource.get('statusMessage')
    result.__gce = gce
    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
    return result

  @staticmethod
  def to_json(value, gce=None, project=None):
    """Serialize value (Instance instance or plain dict) to a JSON dict.

    None fields are omitted. machineType/image/zone references are
    normalized via gce._normalize only when both gce and project are
    supplied; otherwise passed through unchanged. Raises TypeError for any
    other input type.
    """
    result = {'kind': 'compute#instance'}
    if isinstance(value, Instance):
      if value.networkInterfaces is not None:
        result['networkInterfaces'] = NetworkInterface.array_to_json(value.networkInterfaces, gce, project)
      if value.metadata is not None:
        result['metadata'] = Metadata.to_json(value.metadata)
      if value.disks is not None:
        result['disks'] = AttachedDisk.array_to_json(value.disks, gce, project)
      if value.machineType is not None:
        result['machineType'] = (gce._normalize(project, 'machine-types', value.machineType) if (gce is not None and project is not None) else value.machineType)
      if value.serviceAccounts is not None:
        result['serviceAccounts'] = ServiceAccount.array_to_json(value.serviceAccounts)
      if value.tags is not None:
        result['tags'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.tags)
      if value.image is not None:
        result['image'] = (gce._normalize(project, 'images', value.image) if (gce is not None and project is not None) else value.image)
      if value.zone is not None:
        result['zone'] = (gce._normalize(project, 'zones', value.zone) if (gce is not None and project is not None) else value.zone)
      if value.description is not None:
        result['description'] = value.description
      if value.name is not None:
        result['name'] = value.name
      if value.creationTimestamp is not None:
        result['creationTimestamp'] = value.creationTimestamp
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
      if value.status is not None:
        result['status'] = value.status
      if value.statusMessage is not None:
        result['statusMessage'] = value.statusMessage
    elif isinstance(value, dict):
      temp = value.get('networkInterfaces')
      if temp is not None:
        result['networkInterfaces'] = NetworkInterface.array_to_json(temp, gce, project)
      temp = value.get('metadata')
      if temp is not None:
        result['metadata'] = Metadata.to_json(temp)
      temp = value.get('disks')
      if temp is not None:
        result['disks'] = AttachedDisk.array_to_json(temp, gce, project)
      temp = value.get('machineType')
      if temp is not None:
        result['machineType'] = (gce._normalize(project, 'machine-types', temp) if (gce is not None and project is not None) else temp)
      temp = value.get('serviceAccounts')
      if temp is not None:
        result['serviceAccounts'] = ServiceAccount.array_to_json(temp)
      temp = value.get('tags')
      if temp is not None:
        result['tags'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
      temp = value.get('image')
      if temp is not None:
        result['image'] = (gce._normalize(project, 'images', temp) if (gce is not None and project is not None) else temp)
      temp = value.get('zone')
      if temp is not None:
        result['zone'] = (gce._normalize(project, 'zones', temp) if (gce is not None and project is not None) else temp)
      temp = value.get('description')
      if temp is not None:
        result['description'] = temp
      temp = value.get('name')
      if temp is not None:
        result['name'] = temp
      temp = value.get('creationTimestamp')
      if temp is not None:
        result['creationTimestamp'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
      temp = value.get('status')
      if temp is not None:
        result['status'] = temp
      temp = value.get('statusMessage')
      if temp is not None:
        result['statusMessage'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # Returns None (implicitly) when input is None.
    if input is not None:
      return [Instance.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value, gce, project):
    # NOTE(review): unlike to_json, unsupported types fall through and
    # return None implicitly rather than raising TypeError.
    if value is None:
      return []
    elif isinstance(value, list):
      return [Instance.to_json(item, gce, project) for item in value]
    elif isinstance(value, dict) or isinstance(value, Instance):
      return [Instance.to_json(value, gce, project)]

  def delete(self, project=None, blocking=True, gce=None):
    """Delete this instance; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.delete_instance(self, project, blocking)

  def get(self, project=None, gce=None):
    """Fetch this instance; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.get_instance(self, project)

  def insert(self, networkInterfaces=None, metadata=None, disks=None, machineType=None, serviceAccounts=None, tags=None, image=None, zone=None, description=None, name=None, project=None, blocking=True, gce=None):
    """Insert this instance; gce/project default to values from from_json."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.insert_instance(self, networkInterfaces, metadata, disks, machineType, serviceAccounts, tags, image, zone, description, name, project, blocking)
+
+
class InstanceList(gce_base.ListObjectBase):
  """Generated class InstanceList.

  One page of Instance resources (JSON kind 'compute#instanceList').
  """
  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']

  def __init__(self, items=None, nextPageToken=None):
    # NOTE(review): base class presumably provides list-style access over
    # items -- confirm in gce_base.ListObjectBase.
    gce_base.ListObjectBase.__init__(self, items)
    self.items = items
    self.nextPageToken = nextPageToken
    # Server-assigned fields; populated by from_json only.
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<InstanceList instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(InstanceList.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build an InstanceList from a parsed JSON dict.

    Returns None for None input; raises ValueError on a wrong 'kind'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#instanceList':
      raise ValueError('Cannot load InstanceList from {0}.'.format(kind))

    result = InstanceList(
        Instance.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'))

    result.id = resource.get('id')
    result.selfLink = resource.get('selfLink')
    return result

  @staticmethod
  def to_json(value, gce=None, project=None):
    """Serialize value (InstanceList instance or plain dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    result = {'kind': 'compute#instanceList'}
    if isinstance(value, InstanceList):
      if value.items is not None:
        result['items'] = Instance.array_to_json(value.items, gce, project)
      if value.nextPageToken is not None:
        result['nextPageToken'] = value.nextPageToken
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('items')
      if temp is not None:
        result['items'] = Instance.array_to_json(temp, gce, project)
      temp = value.get('nextPageToken')
      if temp is not None:
        result['nextPageToken'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result
+
+
class Kernel(object):
  """Generated class Kernel.

  A kernel resource (JSON kind 'compute#kernel'). All JSON fields map
  one-to-one onto attributes of the same name.
  """
  __slots__ = ['description', 'name', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']

  def __init__(self, description=None, name=None):
    # Service handle / project are filled in by from_json, not by callers.
    self.__gce = None
    self.__project = None
    self.description = description
    self.name = name
    # Server-assigned fields; populated by from_json only.
    self.creationTimestamp = None
    self.id = None
    self.selfLink = None

  def __str__(self):
    body = json.dumps(Kernel.to_json(self), indent=2)
    return '<Kernel instance at {0:#x}:\n{1}>'.format(id(self), body)

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build a Kernel from a parsed JSON dict (None passes through).

    Raises ValueError when resource['kind'] is not 'compute#kernel'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#kernel':
      raise ValueError('Cannot load Kernel from {0}.'.format(kind))
    kernel = Kernel(resource.get('description'), resource.get('name'))
    kernel.creationTimestamp = resource.get('creationTimestamp')
    kernel.id = resource.get('id')
    link = resource.get('selfLink')
    kernel.selfLink = link
    # Capture service handle and selfLink-derived project for later calls.
    kernel.__gce = gce
    kernel.__project = gce_base.GoogleComputeEngineBase._parse_project(link)
    return kernel

  @staticmethod
  def to_json(value):
    """Serialize value (Kernel instance or plain dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    fields = ('description', 'name', 'creationTimestamp', 'id', 'selfLink')
    result = {'kind': 'compute#kernel'}
    if isinstance(value, Kernel):
      for field in fields:
        item = getattr(value, field)
        if item is not None:
          result[field] = item
    elif isinstance(value, dict):
      for field in fields:
        item = value.get(field)
        if item is not None:
          result[field] = item
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    """Map a JSON array to a list of Kernel objects (None passes through)."""
    if input is None:
      return None
    return [Kernel.from_json(entry, gce) for entry in input]

  @staticmethod
  def array_to_json(value):
    """Serialize a list (or single instance/dict) of kernels to JSON."""
    if value is None:
      return []
    if isinstance(value, list):
      return [Kernel.to_json(entry) for entry in value]
    if isinstance(value, (dict, Kernel)):
      return [Kernel.to_json(value)]

  def get(self, project=None, gce=None):
    """Fetch this kernel via gce.get_kernel; defaults come from from_json."""
    gce = self.__gce if gce is None else gce
    project = self.__project if project is None else project
    if gce is None:
      raise ValueError('Missing gce')
    if project is None:
      raise ValueError('Missing project')
    return gce.get_kernel(self, project)
+
+
class KernelList(gce_base.ListObjectBase):
  """Generated class KernelList.

  One page of Kernel resources (JSON kind 'compute#kernelList').
  """
  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']

  def __init__(self, items=None, nextPageToken=None):
    # NOTE(review): base class presumably provides list-style access over
    # items -- confirm in gce_base.ListObjectBase.
    gce_base.ListObjectBase.__init__(self, items)
    self.items = items
    self.nextPageToken = nextPageToken
    # Server-assigned fields; populated by from_json only.
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<KernelList instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(KernelList.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build a KernelList from a parsed JSON dict.

    Returns None for None input; raises ValueError on a wrong 'kind'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#kernelList':
      raise ValueError('Cannot load KernelList from {0}.'.format(kind))

    result = KernelList(
        Kernel.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'))

    result.id = resource.get('id')
    result.selfLink = resource.get('selfLink')
    return result

  @staticmethod
  def to_json(value):
    """Serialize value (KernelList instance or plain dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    result = {'kind': 'compute#kernelList'}
    if isinstance(value, KernelList):
      if value.items is not None:
        result['items'] = Kernel.array_to_json(value.items)
      if value.nextPageToken is not None:
        result['nextPageToken'] = value.nextPageToken
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('items')
      if temp is not None:
        result['items'] = Kernel.array_to_json(temp)
      temp = value.get('nextPageToken')
      if temp is not None:
        result['nextPageToken'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result
+
+
class MachineType(object):
  """Generated class MachineType.

  A machine type resource (JSON kind 'compute#machineType') describing
  CPU, memory and disk capabilities.
  """
  __slots__ = ['availableZone', 'description', 'ephemeralDisks', 'guestCpus', 'hostCpus', 'imageSpaceGb', 'maximumPersistentDisks', 'maximumPersistentDisksSizeGb', 'memoryMb', 'name', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']

  def __init__(self, availableZone=None, description=None, ephemeralDisks=None, guestCpus=None, hostCpus=None, imageSpaceGb=None, maximumPersistentDisks=None, maximumPersistentDisksSizeGb=None, memoryMb=None, name=None):
    # Service handle and project are only filled in by from_json.
    self.__gce = None
    self.__project = None
    self.availableZone = availableZone
    self.description = description
    self.ephemeralDisks = ephemeralDisks
    self.guestCpus = guestCpus
    self.hostCpus = hostCpus
    self.imageSpaceGb = imageSpaceGb
    self.maximumPersistentDisks = maximumPersistentDisks
    self.maximumPersistentDisksSizeGb = maximumPersistentDisksSizeGb
    self.memoryMb = memoryMb
    self.name = name
    # Server-assigned fields; populated by from_json only.
    self.creationTimestamp = None
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<MachineType instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(MachineType.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build a MachineType from a parsed JSON dict.

    Returns None for None input; raises ValueError when resource['kind']
    is not 'compute#machineType'. Captures gce and the project derived
    from selfLink for get() defaults.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#machineType':
      raise ValueError('Cannot load MachineType from {0}.'.format(kind))

    result = MachineType(
        gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('availableZone')),
        resource.get('description'),
        _EphemeralDisk.array_from_json(resource.get('ephemeralDisks'), gce),
        resource.get('guestCpus'),
        resource.get('hostCpus'),
        resource.get('imageSpaceGb'),
        resource.get('maximumPersistentDisks'),
        resource.get('maximumPersistentDisksSizeGb'),
        resource.get('memoryMb'),
        resource.get('name'))

    result.creationTimestamp = resource.get('creationTimestamp')
    result.id = resource.get('id')
    self_link = resource.get('selfLink')
    result.selfLink = self_link
    result.__gce = gce
    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
    return result

  @staticmethod
  def to_json(value):
    """Serialize value (MachineType instance or plain dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    result = {'kind': 'compute#machineType'}
    if isinstance(value, MachineType):
      if value.availableZone is not None:
        result['availableZone'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.availableZone)
      if value.description is not None:
        result['description'] = value.description
      if value.ephemeralDisks is not None:
        result['ephemeralDisks'] = _EphemeralDisk.array_to_json(value.ephemeralDisks)
      if value.guestCpus is not None:
        result['guestCpus'] = value.guestCpus
      if value.hostCpus is not None:
        result['hostCpus'] = value.hostCpus
      if value.imageSpaceGb is not None:
        result['imageSpaceGb'] = value.imageSpaceGb
      if value.maximumPersistentDisks is not None:
        result['maximumPersistentDisks'] = value.maximumPersistentDisks
      if value.maximumPersistentDisksSizeGb is not None:
        result['maximumPersistentDisksSizeGb'] = value.maximumPersistentDisksSizeGb
      if value.memoryMb is not None:
        result['memoryMb'] = value.memoryMb
      if value.name is not None:
        result['name'] = value.name
      if value.creationTimestamp is not None:
        result['creationTimestamp'] = value.creationTimestamp
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('availableZone')
      if temp is not None:
        result['availableZone'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
      temp = value.get('description')
      if temp is not None:
        result['description'] = temp
      temp = value.get('ephemeralDisks')
      if temp is not None:
        result['ephemeralDisks'] = _EphemeralDisk.array_to_json(temp)
      temp = value.get('guestCpus')
      if temp is not None:
        result['guestCpus'] = temp
      temp = value.get('hostCpus')
      if temp is not None:
        result['hostCpus'] = temp
      temp = value.get('imageSpaceGb')
      if temp is not None:
        result['imageSpaceGb'] = temp
      temp = value.get('maximumPersistentDisks')
      if temp is not None:
        result['maximumPersistentDisks'] = temp
      temp = value.get('maximumPersistentDisksSizeGb')
      if temp is not None:
        result['maximumPersistentDisksSizeGb'] = temp
      temp = value.get('memoryMb')
      if temp is not None:
        result['memoryMb'] = temp
      temp = value.get('name')
      if temp is not None:
        result['name'] = temp
      temp = value.get('creationTimestamp')
      if temp is not None:
        result['creationTimestamp'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # Returns None (implicitly) when input is None.
    if input is not None:
      return [MachineType.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    # NOTE(review): unlike to_json, unsupported types fall through and
    # return None implicitly rather than raising TypeError.
    if value is None:
      return []
    elif isinstance(value, list):
      return [MachineType.to_json(item) for item in value]
    elif isinstance(value, dict) or isinstance(value, MachineType):
      return [MachineType.to_json(value)]

  def get(self, project=None, gce=None):
    """Fetch this machine type; gce/project default to from_json values."""
    if gce is None: gce = self.__gce
    if gce is None: raise ValueError('Missing gce')
    if project is None: project = self.__project
    if project is None: raise ValueError('Missing project')
    return gce.get_machine_type(self, project)
+
+
class MachineTypeList(gce_base.ListObjectBase):
  """Generated class MachineTypeList.

  One page of MachineType resources (JSON kind 'compute#machineTypeList').
  """
  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']

  def __init__(self, items=None, nextPageToken=None):
    # NOTE(review): base class presumably provides list-style access over
    # items -- confirm in gce_base.ListObjectBase.
    gce_base.ListObjectBase.__init__(self, items)
    self.items = items
    self.nextPageToken = nextPageToken
    # Server-assigned fields; populated by from_json only.
    self.id = None
    self.selfLink = None

  def __str__(self):
    # Debug form: object identity plus the pretty-printed JSON payload.
    return '<MachineTypeList instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(MachineTypeList.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Build a MachineTypeList from a parsed JSON dict.

    Returns None for None input; raises ValueError on a wrong 'kind'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#machineTypeList':
      raise ValueError('Cannot load MachineTypeList from {0}.'.format(kind))

    result = MachineTypeList(
        MachineType.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'))

    result.id = resource.get('id')
    result.selfLink = resource.get('selfLink')
    return result

  @staticmethod
  def to_json(value):
    """Serialize value (MachineTypeList instance or dict) to a JSON dict.

    None fields are omitted; raises TypeError for any other input type.
    """
    result = {'kind': 'compute#machineTypeList'}
    if isinstance(value, MachineTypeList):
      if value.items is not None:
        result['items'] = MachineType.array_to_json(value.items)
      if value.nextPageToken is not None:
        result['nextPageToken'] = value.nextPageToken
      if value.id is not None:
        result['id'] = value.id
      if value.selfLink is not None:
        result['selfLink'] = value.selfLink
    elif isinstance(value, dict):
      temp = value.get('items')
      if temp is not None:
        result['items'] = MachineType.array_to_json(temp)
      temp = value.get('nextPageToken')
      if temp is not None:
        result['nextPageToken'] = temp
      temp = value.get('id')
      if temp is not None:
        result['id'] = temp
      temp = value.get('selfLink')
      if temp is not None:
        result['selfLink'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result
+
+
+class Metadata(object):
+  """Generated class Metadata.
+
+  A collection of key/value metadata entries; JSON resource of kind
+  'compute#metadata'.  Machine-generated code.
+  """
+  __slots__ = ['items']
+
+  def __init__(self, items=None):
+    # items: list of _Item entries, or None.
+    self.items = items
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Metadata instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Metadata.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds Metadata from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#metadata'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#metadata':
+      raise ValueError('Cannot load Metadata from {0}.'.format(kind))
+
+    return Metadata(
+        _Item.array_from_json(resource.get('items'), gce))
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a Metadata instance, or an equivalent plain dict.
+
+    Raises:
+      TypeError: if value is neither Metadata nor a dict.
+    """
+    result = {'kind': 'compute#metadata'}
+    if isinstance(value, Metadata):
+      if value.items is not None:
+        result['items'] = _Item.array_to_json(value.items)
+    elif isinstance(value, dict):
+      temp = value.get('items')
+      if temp is not None:
+        result['items'] = _Item.array_to_json(temp)
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+
+class Network(object):
+  """Generated class Network.
+
+  A virtual network resource; JSON resource of kind 'compute#network'.
+  Machine-generated code.
+  """
+  # The '__gce'/'__project' slots are name-mangled (to '_Network__gce' etc.),
+  # keeping them private; they cache the client and the project parsed from
+  # selfLink so the convenience methods below can omit those arguments.
+  __slots__ = ['IPv4Range', 'gatewayIPv4', 'description', 'name', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']
+
+  def __init__(self, IPv4Range=None, gatewayIPv4=None, description=None, name=None):
+    self.__gce = None
+    self.__project = None
+    self.IPv4Range = IPv4Range
+    self.gatewayIPv4 = gatewayIPv4
+    self.description = description
+    self.name = name
+    # Server-assigned fields; populated by from_json() only.
+    self.creationTimestamp = None
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Network instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Network.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a Network from a decoded JSON dict; None passes through.
+
+    Also captures gce and the project parsed from selfLink for use by the
+    instance methods (delete/get/insert).
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#network'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#network':
+      raise ValueError('Cannot load Network from {0}.'.format(kind))
+
+    result = Network(
+        resource.get('IPv4Range'),
+        resource.get('gatewayIPv4'),
+        resource.get('description'),
+        resource.get('name'))
+
+    result.creationTimestamp = resource.get('creationTimestamp')
+    result.id = resource.get('id')
+    self_link = resource.get('selfLink')
+    result.selfLink = self_link
+    # Mangles to result._Network__gce; legal because this staticmethod is
+    # defined inside class Network.
+    result.__gce = gce
+    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a Network instance, or an equivalent plain dict.
+
+    Only non-None fields are emitted.
+
+    Raises:
+      TypeError: if value is neither a Network nor a dict.
+    """
+    result = {'kind': 'compute#network'}
+    if isinstance(value, Network):
+      if value.IPv4Range is not None:
+        result['IPv4Range'] = value.IPv4Range
+      if value.gatewayIPv4 is not None:
+        result['gatewayIPv4'] = value.gatewayIPv4
+      if value.description is not None:
+        result['description'] = value.description
+      if value.name is not None:
+        result['name'] = value.name
+      if value.creationTimestamp is not None:
+        result['creationTimestamp'] = value.creationTimestamp
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('IPv4Range')
+      if temp is not None:
+        result['IPv4Range'] = temp
+      temp = value.get('gatewayIPv4')
+      if temp is not None:
+        result['gatewayIPv4'] = temp
+      temp = value.get('description')
+      if temp is not None:
+        result['description'] = temp
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+      temp = value.get('creationTimestamp')
+      if temp is not None:
+        result['creationTimestamp'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin, and a None input yields
+    # None (not []), unlike array_to_json below — generator quirk.
+    if input is not None:
+      return [Network.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value):
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [Network.to_json(item) for item in value]
+    elif isinstance(value, dict) or isinstance(value, Network):
+      # A single resource is wrapped into a one-element list.
+      return [Network.to_json(value)]
+
+  def delete(self, project=None, blocking=True, gce=None):
+    """Deletes this network via gce.delete_network.
+
+    Falls back to the gce/project captured by from_json; raises ValueError
+    if neither an argument nor a cached value is available.
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.delete_network(self, project, blocking)
+
+  def get(self, project=None, gce=None):
+    """Fetches this network via gce.get_network (same fallbacks as delete)."""
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.get_network(self, project)
+
+  def insert(self, IPv4Range=None, gatewayIPv4=None, description=None, name=None, project=None, blocking=True, gce=None):
+    """Creates a network via gce.insert_network; explicit arguments override
+    the corresponding fields of self (same gce/project fallbacks as delete).
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.insert_network(self, IPv4Range, gatewayIPv4, description, name, project, blocking)
+
+
+class NetworkInterface(object):
+  """Generated class NetworkInterface.
+
+  An instance's attachment to a network; JSON resource of kind
+  'compute#networkInterface'.  Machine-generated code.
+  """
+  __slots__ = ['network', 'accessConfigs', 'networkIP', 'name']
+
+  def __init__(self, network=None, accessConfigs=None, networkIP=None):
+    self.network = network
+    self.accessConfigs = accessConfigs
+    self.networkIP = networkIP
+    # Server-assigned; populated by from_json() only.
+    self.name = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<NetworkInterface instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(NetworkInterface.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a NetworkInterface from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#networkInterface'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#networkInterface':
+      raise ValueError('Cannot load NetworkInterface from {0}.'.format(kind))
+
+    result = NetworkInterface(
+        resource.get('network'),
+        AccessConfig.array_from_json(resource.get('accessConfigs'), gce),
+        resource.get('networkIP'))
+
+    result.name = resource.get('name')
+    return result
+
+  @staticmethod
+  def to_json(value, gce=None, project=None):
+    """Serializes a NetworkInterface instance, or an equivalent plain dict.
+
+    Unlike most generated to_json methods, this one optionally takes gce and
+    project: when BOTH are given, the 'network' field is expanded to a full
+    resource URL via gce._normalize; otherwise it is emitted verbatim.
+
+    Raises:
+      TypeError: if value is neither a NetworkInterface nor a dict.
+    """
+    result = {'kind': 'compute#networkInterface'}
+    if isinstance(value, NetworkInterface):
+      if value.network is not None:
+        result['network'] = (gce._normalize(project, 'networks', value.network) if (gce is not None and project is not None) else value.network)
+      if value.accessConfigs is not None:
+        result['accessConfigs'] = AccessConfig.array_to_json(value.accessConfigs)
+      if value.networkIP is not None:
+        result['networkIP'] = value.networkIP
+      if value.name is not None:
+        result['name'] = value.name
+    elif isinstance(value, dict):
+      temp = value.get('network')
+      if temp is not None:
+        result['network'] = (gce._normalize(project, 'networks', temp) if (gce is not None and project is not None) else temp)
+      temp = value.get('accessConfigs')
+      if temp is not None:
+        result['accessConfigs'] = AccessConfig.array_to_json(temp)
+      temp = value.get('networkIP')
+      if temp is not None:
+        result['networkIP'] = temp
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin; None yields None, not [].
+    if input is not None:
+      return [NetworkInterface.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value, gce, project):
+    # gce and project are required positionally here (callers always pass
+    # them) and are forwarded so 'network' fields get normalized.
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [NetworkInterface.to_json(item, gce, project) for item in value]
+    elif isinstance(value, dict) or isinstance(value, NetworkInterface):
+      return [NetworkInterface.to_json(value, gce, project)]
+
+
+class NetworkList(gce_base.ListObjectBase):
+  """Generated class NetworkList.
+
+  One page of Network resources; JSON resource of kind
+  'compute#networkList'.  Machine-generated code.
+  """
+  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']
+
+  def __init__(self, items=None, nextPageToken=None):
+    # ListObjectBase makes the list object itself iterable over 'items'.
+    gce_base.ListObjectBase.__init__(self, items)
+    self.items = items
+    self.nextPageToken = nextPageToken
+    # Server-assigned fields; populated by from_json() only.
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<NetworkList instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(NetworkList.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a NetworkList from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#networkList'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#networkList':
+      raise ValueError('Cannot load NetworkList from {0}.'.format(kind))
+
+    result = NetworkList(
+        Network.array_from_json(resource.get('items'), gce),
+        resource.get('nextPageToken'))
+
+    result.id = resource.get('id')
+    result.selfLink = resource.get('selfLink')
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a NetworkList instance, or an equivalent plain dict.
+
+    Raises:
+      TypeError: if value is neither a NetworkList nor a dict.
+    """
+    result = {'kind': 'compute#networkList'}
+    if isinstance(value, NetworkList):
+      if value.items is not None:
+        result['items'] = Network.array_to_json(value.items)
+      if value.nextPageToken is not None:
+        result['nextPageToken'] = value.nextPageToken
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('items')
+      if temp is not None:
+        result['items'] = Network.array_to_json(temp)
+      temp = value.get('nextPageToken')
+      if temp is not None:
+        result['nextPageToken'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+
+class Operation(object):
+  """Generated class Operation.
+
+  An asynchronous server-side operation; JSON resource of kind
+  'compute#operation'.  The nested JSON 'error.errors' array is flattened
+  into the single attribute 'errorErrors'.  Machine-generated code.
+  """
+  # '__gce'/'__project' are name-mangled private slots caching the client
+  # and the project parsed from selfLink for the methods below.
+  __slots__ = ['name', 'clientOperationId', 'creationTimestamp', 'endTime', 'errorErrors', 'httpErrorMessage', 'httpErrorStatusCode', 'id', 'insertTime', 'operationType', 'progress', 'selfLink', 'startTime', 'status', 'statusMessage', 'targetId', 'targetLink', 'user', '__gce', '__project']
+
+  def __init__(self, name=None, errorErrors=None):
+    self.__gce = None
+    self.__project = None
+    self.name = name
+    # All remaining fields are server-assigned and filled by from_json().
+    self.clientOperationId = None
+    self.creationTimestamp = None
+    self.endTime = None
+    self.errorErrors = errorErrors
+    self.httpErrorMessage = None
+    self.httpErrorStatusCode = None
+    self.id = None
+    self.insertTime = None
+    self.operationType = None
+    self.progress = None
+    self.selfLink = None
+    self.startTime = None
+    self.status = None
+    self.statusMessage = None
+    self.targetId = None
+    self.targetLink = None
+    self.user = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Operation instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Operation.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds an Operation from a decoded JSON dict; None passes through.
+
+    Flattens resource['error']['errors'] into errorErrors, and captures
+    gce plus the project parsed from selfLink for delete()/get().
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#operation'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#operation':
+      raise ValueError('Cannot load Operation from {0}.'.format(kind))
+
+    errorErrors = None
+    __temp = resource.get('error')
+    if __temp is not None:
+      errorErrors = _ErrorDetail.array_from_json(__temp.get('errors'), gce)
+    result = Operation(
+        resource.get('name'),
+        errorErrors)
+
+    result.clientOperationId = resource.get('clientOperationId')
+    result.creationTimestamp = resource.get('creationTimestamp')
+    result.endTime = resource.get('endTime')
+    result.httpErrorMessage = resource.get('httpErrorMessage')
+    result.httpErrorStatusCode = resource.get('httpErrorStatusCode')
+    result.id = resource.get('id')
+    result.insertTime = resource.get('insertTime')
+    result.operationType = resource.get('operationType')
+    result.progress = resource.get('progress')
+    self_link = resource.get('selfLink')
+    result.selfLink = self_link
+    result.startTime = resource.get('startTime')
+    result.status = resource.get('status')
+    result.statusMessage = resource.get('statusMessage')
+    result.targetId = resource.get('targetId')
+    result.targetLink = resource.get('targetLink')
+    result.user = resource.get('user')
+    result.__gce = gce
+    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes an Operation instance, or an equivalent plain dict.
+
+    Reconstructs the nested {'error': {'errors': [...]}} structure from the
+    flattened errorErrors attribute.  Only non-None fields are emitted.
+
+    Raises:
+      TypeError: if value is neither an Operation nor a dict.
+    """
+    result = {'kind': 'compute#operation'}
+    if isinstance(value, Operation):
+      if value.name is not None:
+        result['name'] = value.name
+      if value.clientOperationId is not None:
+        result['clientOperationId'] = value.clientOperationId
+      if value.creationTimestamp is not None:
+        result['creationTimestamp'] = value.creationTimestamp
+      if value.endTime is not None:
+        result['endTime'] = value.endTime
+      if value.errorErrors is not None:
+        __temp = {}
+        if value.errorErrors is not None:
+          __temp['errors'] = _ErrorDetail.array_to_json(value.errorErrors)
+        result['error'] = __temp
+      if value.httpErrorMessage is not None:
+        result['httpErrorMessage'] = value.httpErrorMessage
+      if value.httpErrorStatusCode is not None:
+        result['httpErrorStatusCode'] = value.httpErrorStatusCode
+      if value.id is not None:
+        result['id'] = value.id
+      if value.insertTime is not None:
+        result['insertTime'] = value.insertTime
+      if value.operationType is not None:
+        result['operationType'] = value.operationType
+      if value.progress is not None:
+        result['progress'] = value.progress
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+      if value.startTime is not None:
+        result['startTime'] = value.startTime
+      if value.status is not None:
+        result['status'] = value.status
+      if value.statusMessage is not None:
+        result['statusMessage'] = value.statusMessage
+      if value.targetId is not None:
+        result['targetId'] = value.targetId
+      if value.targetLink is not None:
+        result['targetLink'] = value.targetLink
+      if value.user is not None:
+        result['user'] = value.user
+    elif isinstance(value, dict):
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+      temp = value.get('clientOperationId')
+      if temp is not None:
+        result['clientOperationId'] = temp
+      temp = value.get('creationTimestamp')
+      if temp is not None:
+        result['creationTimestamp'] = temp
+      temp = value.get('endTime')
+      if temp is not None:
+        result['endTime'] = temp
+      __temp = value.get('error')
+      if __temp is not None:
+        __temp_1 = {}
+        temp = __temp.get('errors')
+        if temp is not None:
+          __temp_1['errors'] = _ErrorDetail.array_to_json(temp)
+        result['error'] = __temp_1
+      temp = value.get('httpErrorMessage')
+      if temp is not None:
+        result['httpErrorMessage'] = temp
+      temp = value.get('httpErrorStatusCode')
+      if temp is not None:
+        result['httpErrorStatusCode'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('insertTime')
+      if temp is not None:
+        result['insertTime'] = temp
+      temp = value.get('operationType')
+      if temp is not None:
+        result['operationType'] = temp
+      temp = value.get('progress')
+      if temp is not None:
+        result['progress'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+      temp = value.get('startTime')
+      if temp is not None:
+        result['startTime'] = temp
+      temp = value.get('status')
+      if temp is not None:
+        result['status'] = temp
+      temp = value.get('statusMessage')
+      if temp is not None:
+        result['statusMessage'] = temp
+      temp = value.get('targetId')
+      if temp is not None:
+        result['targetId'] = temp
+      temp = value.get('targetLink')
+      if temp is not None:
+        result['targetLink'] = temp
+      temp = value.get('user')
+      if temp is not None:
+        result['user'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin; None yields None, not [].
+    if input is not None:
+      return [Operation.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value):
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [Operation.to_json(item) for item in value]
+    elif isinstance(value, dict) or isinstance(value, Operation):
+      return [Operation.to_json(value)]
+
+  def delete(self, project=None, gce=None):
+    """Deletes this operation via gce.delete_operation.
+
+    Falls back to the gce/project captured by from_json; raises ValueError
+    if neither an argument nor a cached value is available.
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.delete_operation(self, project)
+
+  def get(self, project=None, gce=None):
+    """Re-fetches this operation via gce.get_operation (same fallbacks)."""
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.get_operation(self, project)
+
+
+class OperationList(gce_base.ListObjectBase):
+  """Generated class OperationList.
+
+  One page of Operation resources; JSON resource of kind
+  'compute#operationList'.  Machine-generated code.
+  """
+  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']
+
+  def __init__(self, items=None, nextPageToken=None):
+    # ListObjectBase makes the list object itself iterable over 'items'.
+    gce_base.ListObjectBase.__init__(self, items)
+    self.items = items
+    self.nextPageToken = nextPageToken
+    # Server-assigned fields; populated by from_json() only.
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<OperationList instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(OperationList.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds an OperationList from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#operationList'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#operationList':
+      raise ValueError('Cannot load OperationList from {0}.'.format(kind))
+
+    result = OperationList(
+        Operation.array_from_json(resource.get('items'), gce),
+        resource.get('nextPageToken'))
+
+    result.id = resource.get('id')
+    result.selfLink = resource.get('selfLink')
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes an OperationList instance, or an equivalent plain dict.
+
+    Raises:
+      TypeError: if value is neither an OperationList nor a dict.
+    """
+    result = {'kind': 'compute#operationList'}
+    if isinstance(value, OperationList):
+      if value.items is not None:
+        result['items'] = Operation.array_to_json(value.items)
+      if value.nextPageToken is not None:
+        result['nextPageToken'] = value.nextPageToken
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('items')
+      if temp is not None:
+        result['items'] = Operation.array_to_json(temp)
+      temp = value.get('nextPageToken')
+      if temp is not None:
+        result['nextPageToken'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+
+class Project(object):
+  """Generated class Project.
+
+  A Compute Engine project resource; JSON resource of kind
+  'compute#project'.  Machine-generated code.
+  """
+  # '__gce'/'__project' are name-mangled private slots; only '__gce' is
+  # consumed by the methods below (project identity comes from self).
+  __slots__ = ['commonInstanceMetadata', 'description', 'externalIpAddresses', 'name', 'quotas', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']
+
+  def __init__(self, commonInstanceMetadata=None, description=None, externalIpAddresses=None, name=None, quotas=None):
+    self.__gce = None
+    self.__project = None
+    self.commonInstanceMetadata = commonInstanceMetadata
+    self.description = description
+    self.externalIpAddresses = externalIpAddresses
+    self.name = name
+    self.quotas = quotas
+    # Server-assigned fields; populated by from_json() only.
+    self.creationTimestamp = None
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Project instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Project.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a Project from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#project'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#project':
+      raise ValueError('Cannot load Project from {0}.'.format(kind))
+
+    result = Project(
+        Metadata.from_json(resource.get('commonInstanceMetadata'), gce),
+        resource.get('description'),
+        gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('externalIpAddresses')),
+        resource.get('name'),
+        _Quota.array_from_json(resource.get('quotas'), gce))
+
+    result.creationTimestamp = resource.get('creationTimestamp')
+    result.id = resource.get('id')
+    self_link = resource.get('selfLink')
+    result.selfLink = self_link
+    result.__gce = gce
+    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a Project instance, or an equivalent plain dict.
+
+    Only non-None fields are emitted.
+
+    Raises:
+      TypeError: if value is neither a Project nor a dict.
+    """
+    result = {'kind': 'compute#project'}
+    if isinstance(value, Project):
+      if value.commonInstanceMetadata is not None:
+        result['commonInstanceMetadata'] = Metadata.to_json(value.commonInstanceMetadata)
+      if value.description is not None:
+        result['description'] = value.description
+      if value.externalIpAddresses is not None:
+        result['externalIpAddresses'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.externalIpAddresses)
+      if value.name is not None:
+        result['name'] = value.name
+      if value.quotas is not None:
+        result['quotas'] = _Quota.array_to_json(value.quotas)
+      if value.creationTimestamp is not None:
+        result['creationTimestamp'] = value.creationTimestamp
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('commonInstanceMetadata')
+      if temp is not None:
+        result['commonInstanceMetadata'] = Metadata.to_json(temp)
+      temp = value.get('description')
+      if temp is not None:
+        result['description'] = temp
+      temp = value.get('externalIpAddresses')
+      if temp is not None:
+        result['externalIpAddresses'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+      temp = value.get('quotas')
+      if temp is not None:
+        result['quotas'] = _Quota.array_to_json(temp)
+      temp = value.get('creationTimestamp')
+      if temp is not None:
+        result['creationTimestamp'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  def get(self, gce=None):
+    """Re-fetches this project via gce.get_project.
+
+    Falls back to the gce captured by from_json; raises ValueError if
+    neither is available.
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    return gce.get_project(self)
+
+  def set_common_instance_metadata(self, items=None, gce=None):
+    """Replaces project-wide instance metadata (same gce fallback as get)."""
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    return gce.set_common_instance_metadata(self, items)
+
+
+class ServiceAccount(object):
+  """Generated class ServiceAccount.
+
+  A service account (email plus OAuth scopes) attached to an instance;
+  JSON resource of kind 'compute#serviceAccount'.  Machine-generated code.
+  """
+  __slots__ = ['email', 'scopes']
+
+  def __init__(self, email=None, scopes=None):
+    self.email = email
+    # scopes: list of OAuth scope strings, or None.
+    self.scopes = scopes
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<ServiceAccount instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(ServiceAccount.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a ServiceAccount from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#serviceAccount'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#serviceAccount':
+      raise ValueError('Cannot load ServiceAccount from {0}.'.format(kind))
+
+    return ServiceAccount(
+        resource.get('email'),
+        gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('scopes')))
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a ServiceAccount instance, or an equivalent plain dict.
+
+    Raises:
+      TypeError: if value is neither a ServiceAccount nor a dict.
+    """
+    result = {'kind': 'compute#serviceAccount'}
+    if isinstance(value, ServiceAccount):
+      if value.email is not None:
+        result['email'] = value.email
+      if value.scopes is not None:
+        result['scopes'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.scopes)
+    elif isinstance(value, dict):
+      temp = value.get('email')
+      if temp is not None:
+        result['email'] = temp
+      temp = value.get('scopes')
+      if temp is not None:
+        result['scopes'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin; None yields None, not [].
+    if input is not None:
+      return [ServiceAccount.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value):
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [ServiceAccount.to_json(item) for item in value]
+    elif isinstance(value, dict) or isinstance(value, ServiceAccount):
+      # A single resource is wrapped into a one-element list.
+      return [ServiceAccount.to_json(value)]
+
+
+class Snapshot(object):
+  """Generated class Snapshot.
+
+  A persistent-disk snapshot; JSON resource of kind 'compute#snapshot'.
+  Machine-generated code.
+  """
+  # '__gce'/'__project' are name-mangled private slots caching the client
+  # and the project parsed from selfLink for the methods below.
+  __slots__ = ['sourceDisk', 'description', 'name', 'sourceDiskId', 'creationTimestamp', 'diskSizeGb', 'id', 'selfLink', 'status', '__gce', '__project']
+
+  def __init__(self, sourceDisk=None, description=None, name=None, sourceDiskId=None):
+    self.__gce = None
+    self.__project = None
+    self.sourceDisk = sourceDisk
+    self.description = description
+    self.name = name
+    self.sourceDiskId = sourceDiskId
+    # Server-assigned fields; populated by from_json() only.
+    self.creationTimestamp = None
+    self.diskSizeGb = None
+    self.id = None
+    self.selfLink = None
+    self.status = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Snapshot instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Snapshot.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a Snapshot from a decoded JSON dict; None passes through.
+
+    Also captures gce and the project parsed from selfLink for use by the
+    instance methods (delete/get/insert).
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#snapshot'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#snapshot':
+      raise ValueError('Cannot load Snapshot from {0}.'.format(kind))
+
+    result = Snapshot(
+        resource.get('sourceDisk'),
+        resource.get('description'),
+        resource.get('name'),
+        resource.get('sourceDiskId'))
+
+    result.creationTimestamp = resource.get('creationTimestamp')
+    result.diskSizeGb = resource.get('diskSizeGb')
+    result.id = resource.get('id')
+    self_link = resource.get('selfLink')
+    result.selfLink = self_link
+    result.status = resource.get('status')
+    result.__gce = gce
+    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a Snapshot instance, or an equivalent plain dict.
+
+    Only non-None fields are emitted.
+
+    Raises:
+      TypeError: if value is neither a Snapshot nor a dict.
+    """
+    result = {'kind': 'compute#snapshot'}
+    if isinstance(value, Snapshot):
+      if value.sourceDisk is not None:
+        result['sourceDisk'] = value.sourceDisk
+      if value.description is not None:
+        result['description'] = value.description
+      if value.name is not None:
+        result['name'] = value.name
+      if value.sourceDiskId is not None:
+        result['sourceDiskId'] = value.sourceDiskId
+      if value.creationTimestamp is not None:
+        result['creationTimestamp'] = value.creationTimestamp
+      if value.diskSizeGb is not None:
+        result['diskSizeGb'] = value.diskSizeGb
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+      if value.status is not None:
+        result['status'] = value.status
+    elif isinstance(value, dict):
+      temp = value.get('sourceDisk')
+      if temp is not None:
+        result['sourceDisk'] = temp
+      temp = value.get('description')
+      if temp is not None:
+        result['description'] = temp
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+      temp = value.get('sourceDiskId')
+      if temp is not None:
+        result['sourceDiskId'] = temp
+      temp = value.get('creationTimestamp')
+      if temp is not None:
+        result['creationTimestamp'] = temp
+      temp = value.get('diskSizeGb')
+      if temp is not None:
+        result['diskSizeGb'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+      temp = value.get('status')
+      if temp is not None:
+        result['status'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin; None yields None, not [].
+    if input is not None:
+      return [Snapshot.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value):
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [Snapshot.to_json(item) for item in value]
+    elif isinstance(value, dict) or isinstance(value, Snapshot):
+      # A single resource is wrapped into a one-element list.
+      return [Snapshot.to_json(value)]
+
+  def delete(self, project=None, blocking=True, gce=None):
+    """Deletes this snapshot via gce.delete_snapshot.
+
+    Falls back to the gce/project captured by from_json; raises ValueError
+    if neither an argument nor a cached value is available.
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.delete_snapshot(self, project, blocking)
+
+  def get(self, project=None, gce=None):
+    """Fetches this snapshot via gce.get_snapshot (same fallbacks as delete)."""
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.get_snapshot(self, project)
+
+  def insert(self, sourceDisk=None, description=None, name=None, project=None, sourceDiskId=None, blocking=True, gce=None):
+    """Creates a snapshot via gce.insert_snapshot; explicit arguments
+    override the corresponding fields of self (same gce/project fallbacks).
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.insert_snapshot(self, sourceDisk, description, name, project, sourceDiskId, blocking)
+
+
+class SnapshotList(gce_base.ListObjectBase):
+  """Generated class SnapshotList.
+
+  One page of Snapshot resources; JSON resource of kind
+  'compute#snapshotList'.  Machine-generated code.
+  """
+  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']
+
+  def __init__(self, items=None, nextPageToken=None):
+    # ListObjectBase makes the list object itself iterable over 'items'.
+    gce_base.ListObjectBase.__init__(self, items)
+    self.items = items
+    self.nextPageToken = nextPageToken
+    # Server-assigned fields; populated by from_json() only.
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<SnapshotList instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(SnapshotList.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a SnapshotList from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#snapshotList'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#snapshotList':
+      raise ValueError('Cannot load SnapshotList from {0}.'.format(kind))
+
+    result = SnapshotList(
+        Snapshot.array_from_json(resource.get('items'), gce),
+        resource.get('nextPageToken'))
+
+    result.id = resource.get('id')
+    result.selfLink = resource.get('selfLink')
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a SnapshotList instance, or an equivalent plain dict.
+
+    Raises:
+      TypeError: if value is neither a SnapshotList nor a dict.
+    """
+    result = {'kind': 'compute#snapshotList'}
+    if isinstance(value, SnapshotList):
+      if value.items is not None:
+        result['items'] = Snapshot.array_to_json(value.items)
+      if value.nextPageToken is not None:
+        result['nextPageToken'] = value.nextPageToken
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('items')
+      if temp is not None:
+        result['items'] = Snapshot.array_to_json(temp)
+      temp = value.get('nextPageToken')
+      if temp is not None:
+        result['nextPageToken'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+
+class Zone(object):
+  """Generated class Zone.
+
+  An availability zone; JSON resource of kind 'compute#zone'.
+  Machine-generated code.
+  """
+  # '__gce'/'__project' are name-mangled private slots caching the client
+  # and the project parsed from selfLink for get() below.
+  __slots__ = ['description', 'maintenanceWindows', 'name', 'status', 'availableMachineType', 'creationTimestamp', 'id', 'selfLink', '__gce', '__project']
+
+  def __init__(self, description=None, maintenanceWindows=None, name=None, status=None):
+    self.__gce = None
+    self.__project = None
+    self.description = description
+    self.maintenanceWindows = maintenanceWindows
+    self.name = name
+    self.status = status
+    # Server-assigned fields; populated by from_json() only.
+    self.availableMachineType = None
+    self.creationTimestamp = None
+    self.id = None
+    self.selfLink = None
+
+  def __str__(self):
+    # Pretty-printed JSON form for interactive debugging.
+    return '<Zone instance at {0:#x}:\n{1}>'.format(
+        id(self), json.dumps(Zone.to_json(self), indent=2))
+
+  __repr__ = __str__
+
+  @staticmethod
+  def from_json(resource, gce=None):
+    """Builds a Zone from a decoded JSON dict; None passes through.
+
+    Raises:
+      ValueError: if resource['kind'] is not 'compute#zone'.
+    """
+    if resource is None:
+      return None
+    kind = resource.get('kind')
+    if kind != 'compute#zone':
+      raise ValueError('Cannot load Zone from {0}.'.format(kind))
+
+    result = Zone(
+        resource.get('description'),
+        _MaintenanceWindow.array_from_json(resource.get('maintenanceWindows'), gce),
+        resource.get('name'),
+        resource.get('status'))
+
+    result.availableMachineType = gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('availableMachineType'))
+    result.creationTimestamp = resource.get('creationTimestamp')
+    result.id = resource.get('id')
+    self_link = resource.get('selfLink')
+    result.selfLink = self_link
+    result.__gce = gce
+    result.__project = gce_base.GoogleComputeEngineBase._parse_project(self_link)
+    return result
+
+  @staticmethod
+  def to_json(value):
+    """Serializes a Zone instance, or an equivalent plain dict.
+
+    Only non-None fields are emitted.
+
+    Raises:
+      TypeError: if value is neither a Zone nor a dict.
+    """
+    result = {'kind': 'compute#zone'}
+    if isinstance(value, Zone):
+      if value.description is not None:
+        result['description'] = value.description
+      if value.maintenanceWindows is not None:
+        result['maintenanceWindows'] = _MaintenanceWindow.array_to_json(value.maintenanceWindows)
+      if value.name is not None:
+        result['name'] = value.name
+      if value.status is not None:
+        result['status'] = value.status
+      if value.availableMachineType is not None:
+        result['availableMachineType'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.availableMachineType)
+      if value.creationTimestamp is not None:
+        result['creationTimestamp'] = value.creationTimestamp
+      if value.id is not None:
+        result['id'] = value.id
+      if value.selfLink is not None:
+        result['selfLink'] = value.selfLink
+    elif isinstance(value, dict):
+      temp = value.get('description')
+      if temp is not None:
+        result['description'] = temp
+      temp = value.get('maintenanceWindows')
+      if temp is not None:
+        result['maintenanceWindows'] = _MaintenanceWindow.array_to_json(temp)
+      temp = value.get('name')
+      if temp is not None:
+        result['name'] = temp
+      temp = value.get('status')
+      if temp is not None:
+        result['status'] = temp
+      temp = value.get('availableMachineType')
+      if temp is not None:
+        result['availableMachineType'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
+      temp = value.get('creationTimestamp')
+      if temp is not None:
+        result['creationTimestamp'] = temp
+      temp = value.get('id')
+      if temp is not None:
+        result['id'] = temp
+      temp = value.get('selfLink')
+      if temp is not None:
+        result['selfLink'] = temp
+    else:
+      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+    return result
+
+  @staticmethod
+  def array_from_json(input, gce=None):
+    # NOTE(review): 'input' shadows the builtin; None yields None, not [].
+    if input is not None:
+      return [Zone.from_json(item, gce) for item in input]
+
+  @staticmethod
+  def array_to_json(value):
+    if value is None:
+      return []
+    elif isinstance(value, list):
+      return [Zone.to_json(item) for item in value]
+    elif isinstance(value, dict) or isinstance(value, Zone):
+      # A single resource is wrapped into a one-element list.
+      return [Zone.to_json(value)]
+
+  def get(self, project=None, gce=None):
+    """Fetches this zone via gce.get_zone.
+
+    Falls back to the gce/project captured by from_json; raises ValueError
+    if neither an argument nor a cached value is available.
+    """
+    if gce is None: gce = self.__gce
+    if gce is None: raise ValueError('Missing gce')
+    if project is None: project = self.__project
+    if project is None: raise ValueError('Missing project')
+    return gce.get_zone(self, project)
+
+
class ZoneList(gce_base.ListObjectBase):
  """One page of Zone resources (compute#zoneList).

  Mirrors the JSON wire format: an optional list of zones plus an
  optional continuation token and server-assigned id/selfLink.
  """
  __slots__ = ['items', 'nextPageToken', 'id', 'selfLink']

  def __init__(self, items=None, nextPageToken=None):
    gce_base.ListObjectBase.__init__(self, items)
    self.items = items
    self.nextPageToken = nextPageToken
    self.id = None
    self.selfLink = None

  def __str__(self):
    serialized = json.dumps(ZoneList.to_json(self), indent=2)
    return '<ZoneList instance at {0:#x}:\n{1}>'.format(id(self), serialized)

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Parses a compute#zoneList JSON dict into a ZoneList.

    Returns None when resource is None; raises ValueError when the
    resource's 'kind' is not 'compute#zoneList'.
    """
    if resource is None:
      return None
    kind = resource.get('kind')
    if kind != 'compute#zoneList':
      raise ValueError('Cannot load ZoneList from {0}.'.format(kind))

    parsed = ZoneList(
        Zone.array_from_json(resource.get('items'), gce),
        resource.get('nextPageToken'))
    parsed.id = resource.get('id')
    parsed.selfLink = resource.get('selfLink')
    return parsed

  @staticmethod
  def to_json(value):
    """Serializes a ZoneList instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither a ZoneList nor a dict.
    """
    result = {'kind': 'compute#zoneList'}
    if isinstance(value, ZoneList):
      fields = (value.items, value.nextPageToken, value.id, value.selfLink)
    elif isinstance(value, dict):
      fields = (value.get('items'), value.get('nextPageToken'),
                value.get('id'), value.get('selfLink'))
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    items, next_page_token, id_value, self_link = fields
    if items is not None:
      result['items'] = Zone.array_to_json(items)
    if next_page_token is not None:
      result['nextPageToken'] = next_page_token
    if id_value is not None:
      result['id'] = id_value
    if self_link is not None:
      result['selfLink'] = self_link
    return result
+
+
class _Allowed(object):
  """JSON-serializable firewall 'allowed' entry (protocol plus ports).

  Attributes:
    IPProtocol: Protocol the rule applies to (e.g. 'tcp').
    ports: Optional list of port strings.
  """
  __slots__ = ['IPProtocol', 'ports']

  def __init__(self, IPProtocol=None, ports=None):
    self.IPProtocol = IPProtocol
    self.ports = ports

  def __str__(self):
    return '<_Allowed instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_Allowed.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds an _Allowed from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _Allowed(
        resource.get('IPProtocol'),
        gce_base.GoogleComputeEngineBase._json_to_strings(resource.get('ports')))

  @staticmethod
  def to_json(value):
    """Serializes an _Allowed instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither an _Allowed nor a dict.
    """
    result = {}
    if isinstance(value, _Allowed):
      if value.IPProtocol is not None:
        result['IPProtocol'] = value.IPProtocol
      if value.ports is not None:
        result['ports'] = gce_base.GoogleComputeEngineBase._strings_to_json(value.ports)
    elif isinstance(value, dict):
      temp = value.get('IPProtocol')
      if temp is not None:
        result['IPProtocol'] = temp
      temp = value.get('ports')
      if temp is not None:
        result['ports'] = gce_base.GoogleComputeEngineBase._strings_to_json(temp)
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_Allowed.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or an _Allowed.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_Allowed.to_json(item) for item in value]
    elif isinstance(value, (dict, _Allowed)):
      return [_Allowed.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
class _EphemeralDisk(object):
  """JSON-serializable ephemeral disk description.

  Attributes:
    diskGb: Size of the ephemeral disk, in GB.
  """
  __slots__ = ['diskGb']

  def __init__(self, diskGb=None):
    self.diskGb = diskGb

  def __str__(self):
    return '<_EphemeralDisk instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_EphemeralDisk.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds an _EphemeralDisk from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _EphemeralDisk(
        resource.get('diskGb'))

  @staticmethod
  def to_json(value):
    """Serializes an _EphemeralDisk instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither an _EphemeralDisk nor a dict.
    """
    result = {}
    if isinstance(value, _EphemeralDisk):
      if value.diskGb is not None:
        result['diskGb'] = value.diskGb
    elif isinstance(value, dict):
      temp = value.get('diskGb')
      if temp is not None:
        result['diskGb'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_EphemeralDisk.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or an _EphemeralDisk.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_EphemeralDisk.to_json(item) for item in value]
    elif isinstance(value, (dict, _EphemeralDisk)):
      return [_EphemeralDisk.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
class _ErrorDetail(object):
  """JSON-serializable detail entry of an operation error.

  Attributes:
    code: Error code string.
    location: Field of the request that triggered the error, if any.
    message: Human-readable error message.
  """
  __slots__ = ['code', 'location', 'message']

  def __init__(self, code=None, location=None, message=None):
    self.code = code
    self.location = location
    self.message = message

  def __str__(self):
    return '<_ErrorDetail instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_ErrorDetail.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds an _ErrorDetail from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _ErrorDetail(
        resource.get('code'),
        resource.get('location'),
        resource.get('message'))

  @staticmethod
  def to_json(value):
    """Serializes an _ErrorDetail instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither an _ErrorDetail nor a dict.
    """
    result = {}
    if isinstance(value, _ErrorDetail):
      if value.code is not None:
        result['code'] = value.code
      if value.location is not None:
        result['location'] = value.location
      if value.message is not None:
        result['message'] = value.message
    elif isinstance(value, dict):
      temp = value.get('code')
      if temp is not None:
        result['code'] = temp
      temp = value.get('location')
      if temp is not None:
        result['location'] = temp
      temp = value.get('message')
      if temp is not None:
        result['message'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_ErrorDetail.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or an _ErrorDetail.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_ErrorDetail.to_json(item) for item in value]
    elif isinstance(value, (dict, _ErrorDetail)):
      return [_ErrorDetail.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
class _Item(object):
  """JSON-serializable metadata key/value pair.

  Attributes:
    key: Metadata key.
    value: Metadata value.
  """
  __slots__ = ['key', 'value']

  def __init__(self, key=None, value=None):
    self.key = key
    self.value = value

  def __str__(self):
    return '<_Item instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_Item.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds an _Item from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _Item(
        resource.get('key'),
        resource.get('value'))

  @staticmethod
  def to_json(value):
    """Serializes an _Item instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither an _Item nor a dict.
    """
    result = {}
    if isinstance(value, _Item):
      if value.key is not None:
        result['key'] = value.key
      if value.value is not None:
        result['value'] = value.value
    elif isinstance(value, dict):
      temp = value.get('key')
      if temp is not None:
        result['key'] = temp
      temp = value.get('value')
      if temp is not None:
        result['value'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_Item.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or an _Item.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_Item.to_json(item) for item in value]
    elif isinstance(value, (dict, _Item)):
      return [_Item.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
class _MaintenanceWindow(object):
  """JSON-serializable scheduled maintenance window of a zone.

  Attributes:
    beginTime: Start time of the window, as reported by the API.
    description: Textual description of the window.
    endTime: End time of the window.
    name: Name of the window.
  """
  __slots__ = ['beginTime', 'description', 'endTime', 'name']

  def __init__(self, beginTime=None, description=None, endTime=None, name=None):
    self.beginTime = beginTime
    self.description = description
    self.endTime = endTime
    self.name = name

  def __str__(self):
    return '<_MaintenanceWindow instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_MaintenanceWindow.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds a _MaintenanceWindow from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _MaintenanceWindow(
        resource.get('beginTime'),
        resource.get('description'),
        resource.get('endTime'),
        resource.get('name'))

  @staticmethod
  def to_json(value):
    """Serializes a _MaintenanceWindow instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither a _MaintenanceWindow nor a dict.
    """
    result = {}
    if isinstance(value, _MaintenanceWindow):
      if value.beginTime is not None:
        result['beginTime'] = value.beginTime
      if value.description is not None:
        result['description'] = value.description
      if value.endTime is not None:
        result['endTime'] = value.endTime
      if value.name is not None:
        result['name'] = value.name
    elif isinstance(value, dict):
      temp = value.get('beginTime')
      if temp is not None:
        result['beginTime'] = temp
      temp = value.get('description')
      if temp is not None:
        result['description'] = temp
      temp = value.get('endTime')
      if temp is not None:
        result['endTime'] = temp
      temp = value.get('name')
      if temp is not None:
        result['name'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_MaintenanceWindow.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or a _MaintenanceWindow.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_MaintenanceWindow.to_json(item) for item in value]
    elif isinstance(value, (dict, _MaintenanceWindow)):
      return [_MaintenanceWindow.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
class _Quota(object):
  """JSON-serializable quota entry for a project.

  Attributes:
    limit: Quota limit for the metric.
    metric: Name of the quota metric.
    usage: Current usage against the limit.
  """
  __slots__ = ['limit', 'metric', 'usage']

  def __init__(self, limit=None, metric=None, usage=None):
    self.limit = limit
    self.metric = metric
    self.usage = usage

  def __str__(self):
    return '<_Quota instance at {0:#x}:\n{1}>'.format(
        id(self), json.dumps(_Quota.to_json(self), indent=2))

  __repr__ = __str__

  @staticmethod
  def from_json(resource, gce=None):
    """Builds a _Quota from a decoded JSON dict (None passes through)."""
    if resource is None:
      return None
    return _Quota(
        resource.get('limit'),
        resource.get('metric'),
        resource.get('usage'))

  @staticmethod
  def to_json(value):
    """Serializes a _Quota instance or plain dict; omits None fields.

    Raises:
      TypeError: If value is neither a _Quota nor a dict.
    """
    result = {}
    if isinstance(value, _Quota):
      if value.limit is not None:
        result['limit'] = value.limit
      if value.metric is not None:
        result['metric'] = value.metric
      if value.usage is not None:
        result['usage'] = value.usage
    elif isinstance(value, dict):
      temp = value.get('limit')
      if temp is not None:
        result['limit'] = temp
      temp = value.get('metric')
      if temp is not None:
        result['metric'] = temp
      temp = value.get('usage')
      if temp is not None:
        result['usage'] = temp
    else:
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
    return result

  @staticmethod
  def array_from_json(input, gce=None):
    # NOTE: 'input' shadows the builtin; kept for interface compatibility.
    if input is not None:
      return [_Quota.from_json(item, gce) for item in input]

  @staticmethod
  def array_to_json(value):
    """Serializes a list (or single instance/dict) to a list of JSON dicts.

    Raises:
      TypeError: If value is not None, a list, a dict or a _Quota.
    """
    if value is None:
      return []
    elif isinstance(value, list):
      return [_Quota.to_json(item) for item in value]
    elif isinstance(value, (dict, _Quota)):
      return [_Quota.to_json(value)]
    else:
      # Previously fell off the end and returned None silently for
      # unsupported types; fail loudly, consistent with to_json.
      raise TypeError('Cannot serialize {0} to json.'.format(str(value)))
+
+
+class GoogleComputeEngine(gce_base.GoogleComputeEngineBase):
+ """The Google Compute Engine Api class is not thread safe yet."""
+
+ VERSION = 'v1beta12'
+ BASE_URL = 'https://www.googleapis.com/compute/v1beta12/projects/'
+
+ __slots__ = []
+
+ __OBJECT_PARSERS = {
+ 'compute#accessConfig': AccessConfig.from_json,
+ 'compute#attachedDisk': AttachedDisk.from_json,
+ 'compute#disk': Disk.from_json,
+ 'compute#diskList': DiskList.from_json,
+ 'compute#firewall': Firewall.from_json,
+ 'compute#firewallList': FirewallList.from_json,
+ 'compute#image': Image.from_json,
+ 'compute#imageList': ImageList.from_json,
+ 'compute#instance': Instance.from_json,
+ 'compute#instanceList': InstanceList.from_json,
+ 'compute#kernel': Kernel.from_json,
+ 'compute#kernelList': KernelList.from_json,
+ 'compute#machineType': MachineType.from_json,
+ 'compute#machineTypeList': MachineTypeList.from_json,
+ 'compute#metadata': Metadata.from_json,
+ 'compute#network': Network.from_json,
+ 'compute#networkInterface': NetworkInterface.from_json,
+ 'compute#networkList': NetworkList.from_json,
+ 'compute#operation': Operation.from_json,
+ 'compute#operationList': OperationList.from_json,
+ 'compute#project': Project.from_json,
+ 'compute#serviceAccount': ServiceAccount.from_json,
+ 'compute#snapshot': Snapshot.from_json,
+ 'compute#snapshotList': SnapshotList.from_json,
+ 'compute#zone': Zone.from_json,
+ 'compute#zoneList': ZoneList.from_json,
+ }
+
+ def _get_parsers(self):
+ return GoogleComputeEngine.__OBJECT_PARSERS
+
+ def __build_delete_operation_request(self, operation=None, project=None):
+ # Unpacks operation if its type is Operation or dict.
+ if isinstance(operation, Operation):
+ operation = operation.name
+ elif isinstance(operation, dict):
+ operation = operation.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not operation:
+ raise ValueError('operation is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/operations/' + str(operation), None, None)
+
+ def delete_operation(self, operation=None, project=None):
+ """Deletes the specified operation resource.
+
+ Args:
+ operation:
+ Name of the operation resource to delete.
+ Or: Operation to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Operation or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+ """
+ return self._execute(self.__build_delete_operation_request(
+ operation, project), False)
+
+ def delete_operations(self, operations=None, project=None):
+ """Deletes the specified operation resource. List operation.
+
+ Args:
+ operations:
+ List of operations to delete.
+ project:
+ Name of the project scoping this request.
+ """
+ return self._execute_list([
+ self.__build_delete_operation_request(operation, project)
+ for operation in operations], False)
+
+ def __build_get_operation_request(self, operation=None, project=None):
+ # Unpacks operation if its type is Operation or dict.
+ if isinstance(operation, Operation):
+ operation = operation.name
+ elif isinstance(operation, dict):
+ operation = operation.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not operation:
+ raise ValueError('operation is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/operations/' + str(operation), None, None)
+
+ def get_operation(self, operation=None, project=None):
+ """Retrieves the specified operation resource.
+
+ Args:
+ operation:
+ Name of the operation resource to return.
+ Or: Operation to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Operation or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_get_operation_request(
+ operation, project), False)
+
+ def get_operations(self, operations=None, project=None):
+ """Retrieves the specified operation resource. List operation.
+
+ Args:
+ operations:
+ List of operations to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_get_operation_request(operation, project)
+ for operation in operations], False)
+
+ def list_operations(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of operation resources contained within the
+ specified project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: OperationList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/operations', query_params, None), False)
+
+ def all_operations(self, filter=None, project=None):
+ """Returns an iterator yielding all operations in a project that
+ match specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all operations.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ return self._generate('GET', str(project) + '/operations', query_params, Operation)
+
+ def __build_get_kernel_request(self, kernel=None, project=None):
+ # Unpacks kernel if its type is Kernel or dict.
+ if isinstance(kernel, Kernel):
+ kernel = kernel.name
+ elif isinstance(kernel, dict):
+ kernel = kernel.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not kernel:
+ raise ValueError('kernel is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/kernels/' + str(kernel), None, None)
+
+ def get_kernel(self, kernel=None, project=None):
+ """Returns the specified kernel resource.
+
+ Args:
+ kernel:
+ Name of the kernel resource to return.
+ Or: Kernel to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Kernel or a JSON describing
+ the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Kernel
+ """
+ return self._execute(self.__build_get_kernel_request(
+ kernel, project), False)
+
+ def get_kernels(self, kernels=None, project=None):
+ """Returns the specified kernel resource. List operation.
+
+ Args:
+ kernels:
+ List of kernels to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Kernel objects.
+ """
+ return self._execute_list([
+ self.__build_get_kernel_request(kernel, project)
+ for kernel in kernels], False)
+
+ def list_kernels(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of kernel resources available to the specified
+ project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: KernelList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/kernels', query_params, None), False)
+
+ def all_kernels(self, filter=None, project=None):
+ """Returns an iterator yielding all kernels in a project that match
+ specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all kernels.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ return self._generate('GET', str(project) + '/kernels', query_params, Kernel)
+
+ def __build_delete_disk_request(self, disk=None, project=None, blocking=True):
+ # Unpacks disk if its type is Disk or dict.
+ if isinstance(disk, Disk):
+ disk = disk.name
+ elif isinstance(disk, dict):
+ disk = disk.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not disk:
+ raise ValueError('disk is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/disks/' + str(disk), None, None)
+
+ def delete_disk(self, disk=None, project=None, blocking=True):
+ """Deletes the specified persistent disk resource.
+
+ Args:
+ disk:
+ Name of the persistent disk resource to delete.
+ Or: Disk to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Disk or a JSON describing
+ the resource.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_delete_disk_request(
+ disk, project, blocking), blocking)
+
+ def delete_disks(self, disks=None, project=None, blocking=True):
+ """Deletes the specified persistent disk resource. List operation.
+
+ Args:
+ disks:
+ List of disks to delete.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_delete_disk_request(disk, project, blocking)
+ for disk in disks], blocking)
+
+ def __build_get_disk_request(self, disk=None, project=None):
+ # Unpacks disk if its type is Disk or dict.
+ if isinstance(disk, Disk):
+ disk = disk.name
+ elif isinstance(disk, dict):
+ disk = disk.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not disk:
+ raise ValueError('disk is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/disks/' + str(disk), None, None)
+
+ def get_disk(self, disk=None, project=None):
+ """Returns the specified persistent disk resource.
+
+ Args:
+ disk:
+ Name of the persistent disk resource to return.
+ Or: Disk to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Disk or a JSON describing
+ the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Disk
+ """
+ return self._execute(self.__build_get_disk_request(
+ disk, project), False)
+
+ def get_disks(self, disks=None, project=None):
+ """Returns the specified persistent disk resource. List operation.
+
+ Args:
+ disks:
+ List of disks to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Disk objects.
+ """
+ return self._execute_list([
+ self.__build_get_disk_request(disk, project)
+ for disk in disks], False)
+
+ def __build_insert_disk_request(self, disk=None, sizeGb=None, zone=None, description=None, name=None, project=None, sourceSnapshot=None, sourceSnapshotId=None, status=None, blocking=True):
+ # Unpacks disk if its type is Disk or dict.
+ if isinstance(disk, Disk):
+ if sizeGb is None:
+ sizeGb = disk.sizeGb
+ if zone is None:
+ zone = disk.zone
+ if description is None:
+ description = disk.description
+ if name is None:
+ name = disk.name
+ if sourceSnapshot is None:
+ sourceSnapshot = disk.sourceSnapshot
+ if sourceSnapshotId is None:
+ sourceSnapshotId = disk.sourceSnapshotId
+ if status is None:
+ status = disk.status
+ elif isinstance(disk, dict):
+ if sizeGb is None:
+ sizeGb = disk.get('sizeGb')
+ if zone is None:
+ zone = disk.get('zone')
+ if description is None:
+ description = disk.get('description')
+ if name is None:
+ name = disk.get('name')
+ if sourceSnapshot is None:
+ sourceSnapshot = disk.get('sourceSnapshot')
+ if sourceSnapshotId is None:
+ sourceSnapshotId = disk.get('sourceSnapshotId')
+ if status is None:
+ status = disk.get('status')
+ elif isinstance(disk, basestring):
+ if name is not None and disk != name:
+ raise ValueError('Conflicting values of disk and name supplied.')
+ name = disk
+
+ # Applies global defaults to missing values.
+ if zone is None:
+ zone = self.default_zone
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not sizeGb:
+ raise ValueError('sizeGb is a required parameter.')
+ if not zone:
+ raise ValueError('zone is a required parameter.')
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ request = {
+ 'kind': 'compute#disk',
+ 'sizeGb': sizeGb,
+ 'zone': (self._normalize(project, 'zones', zone) if (self is not None and project is not None) else zone),
+ 'name': name
+ }
+ if description:
+ request['description'] = description
+ if sourceSnapshot:
+ request['sourceSnapshot'] = sourceSnapshot
+ if sourceSnapshotId:
+ request['sourceSnapshotId'] = sourceSnapshotId
+ if status:
+ request['status'] = status
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/disks', None, json.dumps(request))
+
+ def insert_disk(self, disk=None, sizeGb=None, zone=None, description=None, name=None, project=None, sourceSnapshot=None, sourceSnapshotId=None, status=None, blocking=True):
+ """Creates a persistent disk resource in the specified project using
+ the data included in the request.
+
+ Args:
+ disk:
+ Disk to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Disk or a JSON describing
+ the resource.
+ sizeGb:
+ Size of the persistent disk, specified in GB.
+ zone:
+ URL for the zone where the persistent disk resides;
+ provided by the client when the disk is created. A
+ persistent disk must reside in the same zone as the
+ instance to which it is attached.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ project:
+ Name of the project scoping this request.
+ sourceSnapshot:
+ The source snapshot used to create this disk. Once the
+ source snapshot has been deleted from the system, this
+ field will be cleared, and will not be set even if a
+ snapshot with the same name has been re-created.
+ sourceSnapshotId:
+ The 'id' value of the snapshot used to create this disk.
+ This value may be used to determine whether the disk was
+ created from the current or a previous instance of a given
+ disk snapshot.
+ status:
+ The status of disk creation.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_insert_disk_request(
+ disk, sizeGb, zone, description, name, project, sourceSnapshot, sourceSnapshotId, status, blocking), blocking)
+
def insert_disks(self, disks=None, sizeGb=None, zone=None, description=None, names=None, project=None, sourceSnapshot=None, sourceSnapshotId=None, status=None, blocking=True):
    """Creates a persistent disk resource in the specified project using
    the data included in the request. List operation.

    Args:
        disks:
            List of disks to insert.
        sizeGb:
            Size of the persistent disk, specified in GB.
        zone:
            URL for the zone where the persistent disk resides;
            provided by the client when the disk is created. A
            persistent disk must reside in the same zone as the
            instance to which it is attached.
        description:
            An optional textual description of the resource; provided
            by the client when the resource is created.
        names:
            List of names of objects to insert.
        project:
            Name of the project scoping this request.
        sourceSnapshot:
            The source snapshot used to create this disk. Once the
            source snapshot has been deleted from the system, this
            field will be cleared, and will not be set even if a
            snapshot with the same name has been re-created.
        sourceSnapshotId:
            The 'id' value of the snapshot used to create this disk.
            This value may be used to determine whether the disk was
            created from the current or a previous instance of a given
            disk snapshot.
        status:
            The status of disk creation.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: List of Operation objects.
    """
    # Builds one insert request per (disk, name) pair produced by
    # _combine; the scalar parameters are applied to every request.
    return self._execute_list([
        self.__build_insert_disk_request(disk, sizeGb, zone, description, name, project, sourceSnapshot, sourceSnapshotId, status, blocking)
        for disk, name in gce_base.GoogleComputeEngineBase._combine(disks, names)], blocking)
+
def list_disks(self, filter=None, project=None, maxResults=None, pageToken=None):
    """Retrieves the list of persistent disk resources contained within
    the specified project.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.
        maxResults:
            Optional. Maximum count of results to be returned. Maximum
            and default value is 100.
        pageToken:
            Optional. Tag returned by a previous list request truncated
            by maxResults. Used to continue a previous list request.

    Returns: DiskList
    """
    # NOTE: 'filter' intentionally shadows the builtin to mirror the
    # REST API's query parameter name.
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any; falsy values (None, 0, '')
    # are omitted from the query string.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    if maxResults:
        query_params['maxResults'] = maxResults
    if pageToken:
        query_params['pageToken'] = pageToken
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/disks', query_params, None), False)
+
def all_disks(self, filter=None, project=None):
    """Returns an iterator yielding all disks in a project that match
    specified criteria.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.

    Returns: A generator of all disks.
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    # _generate lazily pages through results, yielding Disk objects;
    # pagination (maxResults/pageToken) is handled internally.
    return self._generate('GET', str(project) + '/disks', query_params, Disk)
+
def __build_delete_snapshot_request(self, snapshot=None, project=None, blocking=True):
    # Builds the DELETE request for a single snapshot.
    # NOTE: 'blocking' is accepted only for signature parity with the
    # public methods; it is not used here (the caller handles blocking).

    # Unpacks snapshot if its type is Snapshot or dict.
    if isinstance(snapshot, Snapshot):
        snapshot = snapshot.name
    elif isinstance(snapshot, dict):
        snapshot = snapshot.get('name')

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not snapshot:
        raise ValueError('snapshot is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/snapshots/' + str(snapshot), None, None)
+
def delete_snapshot(self, snapshot=None, project=None, blocking=True):
    """Deletes the specified persistent disk snapshot resource.

    Args:
        snapshot:
            Name of the persistent disk snapshot resource to delete.
            Or: Snapshot to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Snapshot or a JSON
            describing the resource.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Delegates request construction to the private builder; _execute
    # performs the call and, when blocking, waits for completion.
    return self._execute(self.__build_delete_snapshot_request(
        snapshot, project, blocking), blocking)
+
def delete_snapshots(self, snapshots=None, project=None, blocking=True):
    """Deletes the specified persistent disk snapshot resource. List
    operation.

    Args:
        snapshots:
            List of snapshots to delete.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: List of Operation objects.
    """
    # NOTE: iterates 'snapshots' directly, so passing None (the default)
    # raises TypeError rather than ValueError.
    return self._execute_list([
        self.__build_delete_snapshot_request(snapshot, project, blocking)
        for snapshot in snapshots], blocking)
+
def __build_get_snapshot_request(self, snapshot=None, project=None):
    # Builds the GET request for a single snapshot.

    # Unpacks snapshot if its type is Snapshot or dict.
    if isinstance(snapshot, Snapshot):
        snapshot = snapshot.name
    elif isinstance(snapshot, dict):
        snapshot = snapshot.get('name')

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not snapshot:
        raise ValueError('snapshot is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/snapshots/' + str(snapshot), None, None)
+
def get_snapshot(self, snapshot=None, project=None):
    """Returns the specified persistent disk snapshot resource.

    Args:
        snapshot:
            Name of the persistent disk snapshot resource to return.
            Or: Snapshot to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Snapshot or a JSON
            describing the resource.
        project:
            Name of the project scoping this request.

    Returns: Snapshot
    """
    # GET requests never block on an operation, hence blocking=False.
    return self._execute(self.__build_get_snapshot_request(
        snapshot, project), False)
+
def get_snapshots(self, snapshots=None, project=None):
    """Returns the specified persistent disk snapshot resource. List
    operation.

    Args:
        snapshots:
            List of snapshots to get.
        project:
            Name of the project scoping this request.

    Returns: List of Snapshot objects.
    """
    # One GET request per snapshot; GETs never block on an operation.
    return self._execute_list([
        self.__build_get_snapshot_request(snapshot, project)
        for snapshot in snapshots], False)
+
def __build_insert_snapshot_request(self, snapshot=None, sourceDisk=None, description=None, name=None, project=None, sourceDiskId=None, blocking=True):
    # Builds the POST request that creates a snapshot. Explicit keyword
    # arguments take precedence over fields unpacked from 'snapshot'.
    # NOTE: 'blocking' is unused here; kept for signature parity.

    # Unpacks snapshot if its type is Snapshot or dict.
    if isinstance(snapshot, Snapshot):
        if sourceDisk is None:
            sourceDisk = snapshot.sourceDisk
        if description is None:
            description = snapshot.description
        if name is None:
            name = snapshot.name
        if sourceDiskId is None:
            sourceDiskId = snapshot.sourceDiskId
    elif isinstance(snapshot, dict):
        if sourceDisk is None:
            sourceDisk = snapshot.get('sourceDisk')
        if description is None:
            description = snapshot.get('description')
        if name is None:
            name = snapshot.get('name')
        if sourceDiskId is None:
            sourceDiskId = snapshot.get('sourceDiskId')
    elif isinstance(snapshot, basestring):
        # basestring is Python 2 only (str/unicode); this module targets
        # Python 2. A bare string is treated as the snapshot name.
        if name is not None and snapshot != name:
            raise ValueError('Conflicting values of snapshot and name supplied.')
        name = snapshot

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not name:
        raise ValueError('name is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')

    # Creates a dict that will be sent in the request body.
    request = {
        'kind': 'compute#snapshot',
        'name': name
    }
    if sourceDisk:
        request['sourceDisk'] = sourceDisk
    if description:
        request['description'] = description
    if sourceDiskId:
        request['sourceDiskId'] = sourceDiskId
    return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/snapshots', None, json.dumps(request))
+
def insert_snapshot(self, snapshot=None, sourceDisk=None, description=None, name=None, project=None, sourceDiskId=None, blocking=True):
    """Creates a persistent disk snapshot resource in the specified
    project using the data included in the request.

    Args:
        snapshot:
            Snapshot to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Snapshot or a JSON
            describing the resource.
        sourceDisk:
            The source disk used to create this snapshot. Once the
            source disk has been deleted from the system, this field
            will be cleared, and will not be set even if a disk with
            the same name has been re-created.
        description:
            An optional textual description of the resource; provided
            by the client when the resource is created.
        name:
            Name of the resource; provided by the client when the
            resource is created. The name must be 1-63 characters long,
            and comply with RFC1035.
        project:
            Name of the project scoping this request.
        sourceDiskId:
            The 'id' value of the disk used to create this snapshot.
            This value may be used to determine whether the snapshot
            was taken from the current or a previous instance of a
            given disk name.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Delegates request construction to the private builder.
    return self._execute(self.__build_insert_snapshot_request(
        snapshot, sourceDisk, description, name, project, sourceDiskId, blocking), blocking)
+
def insert_snapshots(self, snapshots=None, sourceDisk=None, description=None, names=None, project=None, sourceDiskId=None, blocking=True):
    """Creates a persistent disk snapshot resource in the specified
    project using the data included in the request. List operation.

    Args:
        snapshots:
            List of snapshots to insert.
        sourceDisk:
            The source disk used to create this snapshot. Once the
            source disk has been deleted from the system, this field
            will be cleared, and will not be set even if a disk with
            the same name has been re-created.
        description:
            An optional textual description of the resource; provided
            by the client when the resource is created.
        names:
            List of names of objects to insert.
        project:
            Name of the project scoping this request.
        sourceDiskId:
            The 'id' value of the disk used to create this snapshot.
            This value may be used to determine whether the snapshot
            was taken from the current or a previous instance of a
            given disk name.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: List of Operation objects.
    """
    # Builds one insert request per (snapshot, name) pair from _combine;
    # the scalar parameters are applied to every request.
    return self._execute_list([
        self.__build_insert_snapshot_request(snapshot, sourceDisk, description, name, project, sourceDiskId, blocking)
        for snapshot, name in gce_base.GoogleComputeEngineBase._combine(snapshots, names)], blocking)
+
def list_snapshots(self, filter=None, project=None, maxResults=None, pageToken=None):
    """Retrieves the list of persistent disk snapshot resources
    contained within the specified project.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.
        maxResults:
            Optional. Maximum count of results to be returned. Maximum
            and default value is 100.
        pageToken:
            Optional. Tag returned by a previous list request truncated
            by maxResults. Used to continue a previous list request.

    Returns: SnapshotList
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any; falsy values are omitted.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    if maxResults:
        query_params['maxResults'] = maxResults
    if pageToken:
        query_params['pageToken'] = pageToken
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/snapshots', query_params, None), False)
+
def all_snapshots(self, filter=None, project=None):
    """Returns an iterator yielding all snapshots in a project that
    match specified criteria.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.

    Returns: A generator of all snapshots.
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    # _generate lazily pages through results, yielding Snapshot objects.
    return self._generate('GET', str(project) + '/snapshots', query_params, Snapshot)
+
def __build_get_zone_request(self, zone=None, project=None):
    # Builds the GET request for a single zone.

    # Unpacks zone if its type is Zone or dict.
    if isinstance(zone, Zone):
        zone = zone.name
    elif isinstance(zone, dict):
        zone = zone.get('name')

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not zone:
        raise ValueError('zone is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/zones/' + str(zone), None, None)
+
def get_zone(self, zone=None, project=None):
    """Returns the specified zone resource.

    Args:
        zone:
            Name of the zone resource to return.
            Or: Zone to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Zone or a JSON describing
            the resource.
        project:
            Name of the project scoping this request.

    Returns: Zone
    """
    # GET requests never block on an operation, hence blocking=False.
    return self._execute(self.__build_get_zone_request(
        zone, project), False)
+
def get_zones(self, zones=None, project=None):
    """Returns the specified zone resource. List operation.

    Args:
        zones:
            List of zones to get.
        project:
            Name of the project scoping this request.

    Returns: List of Zone objects.
    """
    # One GET request per zone; GETs never block on an operation.
    return self._execute_list([
        self.__build_get_zone_request(zone, project)
        for zone in zones], False)
+
def list_zones(self, filter=None, project=None, maxResults=None, pageToken=None):
    """Retrieves the list of zone resources available to the specified
    project.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.
        maxResults:
            Optional. Maximum count of results to be returned. Maximum
            and default value is 100.
        pageToken:
            Optional. Tag returned by a previous list request truncated
            by maxResults. Used to continue a previous list request.

    Returns: ZoneList
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any; falsy values are omitted.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    if maxResults:
        query_params['maxResults'] = maxResults
    if pageToken:
        query_params['pageToken'] = pageToken
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/zones', query_params, None), False)
+
def all_zones(self, filter=None, project=None):
    """Returns an iterator yielding all zones in a project that match
    specified criteria.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.

    Returns: A generator of all zones.
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    # _generate lazily pages through results, yielding Zone objects.
    return self._generate('GET', str(project) + '/zones', query_params, Zone)
+
def add_access_config(self, instance=None, name=None, project=None, network_interface=None, natIP=None, blocking=True):
    """Adds an access config to an instance's network interface.

    Args:
        instance:
            Instance name.
        name:
            Name of this access configuration.
        project:
            Project name.
        network_interface:
            Network interface name.
        natIP:
            An external IP address associated with this instance.
            Specify an unused static IP address available to the
            project. If left blank, the external IP will be drawn from
            a shared ephemeral pool.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not instance:
        raise ValueError('instance is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    if not network_interface:
        raise ValueError('network_interface is a required parameter.')

    # Creates a dict that will be sent in the request body.
    # ONE_TO_ONE_NAT is the only access-config type the request supports.
    request = {
        'kind': 'compute#accessConfig',
        'type': 'ONE_TO_ONE_NAT'
    }
    if name:
        request['name'] = name
    if natIP:
        request['natIP'] = natIP

    # network_interface is guaranteed non-empty by the check above, so it
    # is added unconditionally (the redundant guard was dead code).
    # NOTE(review): confirm the query parameter casing against the API;
    # current GCE documents it as camelCase 'networkInterface'.
    query_params = {'network_interface': network_interface}
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/instances/' + str(instance) + '/add-access-config', query_params, json.dumps(request)), blocking)
+
def __build_delete_instance_request(self, instance=None, project=None, blocking=True):
    # Builds the DELETE request for a single instance.
    # NOTE: 'blocking' is unused here; kept for signature parity.

    # Unpacks instance if its type is Instance or dict.
    if isinstance(instance, Instance):
        instance = instance.name
    elif isinstance(instance, dict):
        instance = instance.get('name')

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not instance:
        raise ValueError('instance is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/instances/' + str(instance), None, None)
+
def delete_instance(self, instance=None, project=None, blocking=True):
    """Deletes the specified instance resource.

    Args:
        instance:
            Name of the instance resource to delete.
            Or: Instance to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Instance or a JSON
            describing the resource.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Delegates request construction to the private builder.
    return self._execute(self.__build_delete_instance_request(
        instance, project, blocking), blocking)
+
def delete_instances(self, instances=None, project=None, blocking=True):
    """Deletes the specified instance resource. List operation.

    Args:
        instances:
            List of instances to delete.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: List of Operation objects.
    """
    # NOTE: iterates 'instances' directly, so passing None (the default)
    # raises TypeError rather than ValueError.
    return self._execute_list([
        self.__build_delete_instance_request(instance, project, blocking)
        for instance in instances], blocking)
+
def delete_access_config(self, instance=None, access_config=None, project=None, network_interface=None, blocking=True):
    """Deletes an access config from an instance's network interface.

    Args:
        instance:
            Instance name.
        access_config:
            Access config name.
        project:
            Project name.
        network_interface:
            Network interface name.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not instance:
        raise ValueError('instance is a required parameter.')
    if not access_config:
        raise ValueError('access_config is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    if not network_interface:
        raise ValueError('network_interface is a required parameter.')

    # Both values are guaranteed non-empty by the checks above, so they
    # are added unconditionally (the redundant guards were dead code).
    # NOTE(review): confirm the query parameter casing against the API;
    # current GCE documents 'accessConfig' and 'networkInterface'.
    query_params = {
        'access_config': access_config,
        'network_interface': network_interface
    }
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/instances/' + str(instance) + '/delete-access-config', query_params, None), blocking)
+
def __build_get_instance_request(self, instance=None, project=None):
    # Builds the GET request for a single instance.

    # Unpacks instance if its type is Instance or dict.
    if isinstance(instance, Instance):
        instance = instance.name
    elif isinstance(instance, dict):
        instance = instance.get('name')

    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not instance:
        raise ValueError('instance is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')
    return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/instances/' + str(instance), None, None)
+
def get_instance(self, instance=None, project=None):
    """Returns the specified instance resource.

    Args:
        instance:
            Name of the instance resource to return.
            Or: Instance to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Instance or a JSON
            describing the resource.
        project:
            Name of the project scoping this request.

    Returns: Instance
    """
    # GET requests never block on an operation, hence blocking=False.
    return self._execute(self.__build_get_instance_request(
        instance, project), False)
+
def get_instances(self, instances=None, project=None):
    """Returns the specified instance resource. List operation.

    Args:
        instances:
            List of instances to get.
        project:
            Name of the project scoping this request.

    Returns: List of Instance objects.
    """
    # One GET request per instance; GETs never block on an operation.
    return self._execute_list([
        self.__build_get_instance_request(instance, project)
        for instance in instances], False)
+
def __build_insert_instance_request(self, instance=None, networkInterfaces=None, metadata=None, disks=None, machineType=None, serviceAccounts=None, tags=None, image=None, zone=None, description=None, name=None, project=None, blocking=True):
    # Builds the POST request that creates an instance. Explicit keyword
    # arguments take precedence over fields unpacked from 'instance'.
    # NOTE: 'blocking' is unused here; kept for signature parity.

    # Unpacks instance if its type is Instance or dict.
    if isinstance(instance, Instance):
        if networkInterfaces is None:
            networkInterfaces = instance.networkInterfaces
        if metadata is None:
            metadata = instance.metadata
        if disks is None:
            disks = instance.disks
        if machineType is None:
            machineType = instance.machineType
        if serviceAccounts is None:
            serviceAccounts = instance.serviceAccounts
        if tags is None:
            tags = instance.tags
        if image is None:
            image = instance.image
        if zone is None:
            zone = instance.zone
        if description is None:
            description = instance.description
        if name is None:
            name = instance.name
    elif isinstance(instance, dict):
        if networkInterfaces is None:
            networkInterfaces = instance.get('networkInterfaces')
        if metadata is None:
            metadata = instance.get('metadata')
        if disks is None:
            disks = instance.get('disks')
        if machineType is None:
            machineType = instance.get('machineType')
        if serviceAccounts is None:
            serviceAccounts = instance.get('serviceAccounts')
        if tags is None:
            tags = instance.get('tags')
        if image is None:
            image = instance.get('image')
        if zone is None:
            zone = instance.get('zone')
        if description is None:
            description = instance.get('description')
        if name is None:
            name = instance.get('name')
    elif isinstance(instance, basestring):
        # basestring is Python 2 only; a bare string is the instance name.
        if name is not None and instance != name:
            raise ValueError('Conflicting values of instance and name supplied.')
        name = instance

    # Applies global defaults to missing values.
    if networkInterfaces is None:
        networkInterfaces = self.default_network_interface
    if machineType is None:
        machineType = self.default_machine_type
    if image is None:
        image = self.default_image
    if zone is None:
        zone = self.default_zone
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not machineType:
        raise ValueError('machineType is a required parameter.')
    if not image:
        raise ValueError('image is a required parameter.')
    if not zone:
        raise ValueError('zone is a required parameter.')
    if not name:
        raise ValueError('name is a required parameter.')
    if not project:
        raise ValueError('project is a required parameter.')

    # Creates a dict that will be sent in the request body. 'self' is
    # always bound in a method and 'project' is validated non-empty
    # above, so the resource names are normalized unconditionally (the
    # generated "self is not None and project is not None" ternaries
    # were dead code and have been removed).
    request = {
        'kind': 'compute#instance',
        'machineType': self._normalize(project, 'machine-types', machineType),
        'image': self._normalize(project, 'images', image),
        'zone': self._normalize(project, 'zones', zone),
        'name': name
    }
    if networkInterfaces:
        request['networkInterfaces'] = NetworkInterface.array_to_json(networkInterfaces, self, project)
    if metadata:
        request['metadata'] = Metadata.to_json(metadata)
    if disks:
        request['disks'] = AttachedDisk.array_to_json(disks, self, project)
    if serviceAccounts:
        request['serviceAccounts'] = ServiceAccount.array_to_json(serviceAccounts)
    if tags:
        request['tags'] = gce_base.GoogleComputeEngineBase._strings_to_json(tags)
    if description:
        request['description'] = description
    return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/instances', None, json.dumps(request))
+
def insert_instance(self, instance=None, networkInterfaces=None, metadata=None, disks=None, machineType=None, serviceAccounts=None, tags=None, image=None, zone=None, description=None, name=None, project=None, blocking=True):
    """Creates an instance resource in the specified project using the
    data included in the request.

    Args:
        instance:
            Instance to use as a template. Other directly provided
            parameters take precedence and override any values in the
            template. May be an instance of Instance or a JSON
            describing the resource.
        networkInterfaces:
            Array of configurations for this interface. This specifies
            how this interface is configured to interact with other
            network services, such as connecting to the internet.
            Currently, ONE_TO_ONE_NAT is the only access config
            supported. If there are no accessConfigs specified, then
            this instance will have no external internet access.
        metadata:
            Metadata key/value pairs assigned to this instance.
            Consists of custom metadata or predefined keys; see
            Instance documentation for more information.
        disks:
            Array of disks associated with this instance. Persistent
            disks must be created before you can assign them.
        machineType:
            URL of the machine type resource describing which machine
            type to use to host the instance; provided by the client
            when the instance is created.
        serviceAccounts:
            A list of service accounts each with specified scopes, for
            which access tokens are to be made available to the
            instance through metadata queries.
        tags:
            An optional set of tags applied to this instance. Used to
            identify valid sources or targets for network firewalls.
            Provided by the client when the instance is created. Each
            tag must be 1-63 characters long, and comply with RFC1035.
        image:
            An optional URL of the disk image resource to be to be
            installed on this instance; provided by the client when the
            instance is created. If not specified, the server will
            choose a default image.
        zone:
            URL of the zone resource describing where this instance
            should be hosted; provided by the client when the instance
            is created.
        description:
            An optional textual description of the resource; provided
            by the client when the resource is created.
        name:
            Name of the resource; provided by the client when the
            resource is created. The name must be 1-63 characters long,
            and comply with RFC1035.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: Operation
    """
    # Delegates request construction to the private builder.
    return self._execute(self.__build_insert_instance_request(
        instance, networkInterfaces, metadata, disks, machineType, serviceAccounts, tags, image, zone, description, name, project, blocking), blocking)
+
def insert_instances(self, instances=None, networkInterfaces=None, metadata=None, disks=None, machineType=None, serviceAccounts=None, tags=None, image=None, zone=None, description=None, names=None, project=None, blocking=True):
    """Creates an instance resource in the specified project using the
    data included in the request. List operation.

    Args:
        instances:
            List of instances to insert.
        networkInterfaces:
            Array of configurations for this interface. This specifies
            how this interface is configured to interact with other
            network services, such as connecting to the internet.
            Currently, ONE_TO_ONE_NAT is the only access config
            supported. If there are no accessConfigs specified, then
            this instance will have no external internet access.
        metadata:
            Metadata key/value pairs assigned to this instance.
            Consists of custom metadata or predefined keys; see
            Instance documentation for more information.
        disks:
            Array of disks associated with this instance. Persistent
            disks must be created before you can assign them.
        machineType:
            URL of the machine type resource describing which machine
            type to use to host the instance; provided by the client
            when the instance is created.
        serviceAccounts:
            A list of service accounts each with specified scopes, for
            which access tokens are to be made available to the
            instance through metadata queries.
        tags:
            An optional set of tags applied to this instance. Used to
            identify valid sources or targets for network firewalls.
            Provided by the client when the instance is created. Each
            tag must be 1-63 characters long, and comply with RFC1035.
        image:
            An optional URL of the disk image resource to be to be
            installed on this instance; provided by the client when the
            instance is created. If not specified, the server will
            choose a default image.
        zone:
            URL of the zone resource describing where this instance
            should be hosted; provided by the client when the instance
            is created.
        description:
            An optional textual description of the resource; provided
            by the client when the resource is created.
        names:
            List of names of objects to insert.
        project:
            Name of the project scoping this request.
        blocking:
            If True, this method will block until the operation
            completes. This is True by default.

    Returns: List of Operation objects.
    """
    # Builds one insert request per (instance, name) pair from _combine;
    # the scalar parameters are applied to every request.
    return self._execute_list([
        self.__build_insert_instance_request(instance, networkInterfaces, metadata, disks, machineType, serviceAccounts, tags, image, zone, description, name, project, blocking)
        for instance, name in gce_base.GoogleComputeEngineBase._combine(instances, names)], blocking)
+
def list_instances(self, filter=None, project=None, maxResults=None, pageToken=None):
    """Retrieves the list of instance resources contained within the
    specified project.

    Args:
        filter:
            Optional. Filter expression for filtering listed resources.
        project:
            Name of the project scoping this request.
        maxResults:
            Optional. Maximum count of results to be returned. Maximum
            and default value is 100.
        pageToken:
            Optional. Tag returned by a previous list request truncated
            by maxResults. Used to continue a previous list request.

    Returns: InstanceList
    """
    # Applies global defaults to missing values.
    if project is None:
        project = self.default_project

    # Ensures all required parameters are present.
    if not project:
        raise ValueError('project is a required parameter.')

    # Processes the query parameters, if any; falsy values are omitted.
    query_params = {}
    if filter:
        query_params['filter'] = filter
    if maxResults:
        query_params['maxResults'] = maxResults
    if pageToken:
        query_params['pageToken'] = pageToken
    return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/instances', query_params, None), False)
+
+ def all_instances(self, filter=None, project=None):
+ """Returns an iterator yielding all instances in a project that
+ match specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all instances.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ if filter:
+ query_params = {}
+ query_params['filter'] = filter
+ # Unlike list_instances, _generate transparently pages through every
+ # result, yielding Instance objects lazily.
+ return self._generate('GET', str(project) + '/instances', query_params, Instance)
+
+ def __build_get_machine_type_request(self, machineType=None, project=None):
+ # Builds (but does not execute) the GET request for one machine type.
+ # machineType may be a MachineType object, a JSON dict, or a name string.
+ # Unpacks machineType if its type is MachineType or dict.
+ if isinstance(machineType, MachineType):
+ machineType = machineType.name
+ elif isinstance(machineType, dict):
+ machineType = machineType.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not machineType:
+ raise ValueError('machineType is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/machine-types/' + str(machineType), None, None)
+
+ def get_machine_type(self, machineType=None, project=None):
+ """Returns the specified machine type resource.
+
+ Args:
+ machineType:
+ Name of the machine type resource to return.
+ Or: MachineType to use as a template. Other directly
+ provided parameters take precedence and override any values
+ in the template. May be an instance of MachineType or a
+ JSON describing the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: MachineType
+ """
+ return self._execute(self.__build_get_machine_type_request(
+ machineType, project), False)
+
+ def get_machine_types(self, machineTypes=None, project=None):
+ """Returns the specified machine type resource. List operation.
+
+ Args:
+ machineTypes:
+ List of machinetypes to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of MachineType objects.
+ """
+ return self._execute_list([
+ self.__build_get_machine_type_request(machineType, project)
+ for machineType in machineTypes], False)
+
+ def list_machine_types(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of machine type resources available to the
+ specified project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: MachineTypeList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ # Returns a single page; use all_machine_types() to iterate everything.
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/machine-types', query_params, None), False)
+
+ def all_machine_types(self, filter=None, project=None):
+ """Returns an iterator yielding all machineTypes in a project that
+ match specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all machineTypes.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ # _generate handles pagination and yields MachineType objects lazily.
+ return self._generate('GET', str(project) + '/machine-types', query_params, MachineType)
+
+ def __build_delete_image_request(self, image=None, project=None, blocking=True):
+ # Builds (but does not execute) the DELETE request for one image.
+ # 'blocking' is accepted only for signature symmetry with the other
+ # builders; the caller passes it to _execute / _execute_list instead.
+ # Unpacks image if its type is Image or dict.
+ if isinstance(image, Image):
+ image = image.name
+ elif isinstance(image, dict):
+ image = image.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not image:
+ raise ValueError('image is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/images/' + str(image), None, None)
+
+ def delete_image(self, image=None, project=None, blocking=True):
+ """Deletes the specified image resource.
+
+ Args:
+ image:
+ Name of the image resource to delete.
+ Or: Image to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Image or a JSON describing
+ the resource.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_delete_image_request(
+ image, project, blocking), blocking)
+
+ def delete_images(self, images=None, project=None, blocking=True):
+ """Deletes the specified image resource. List operation.
+
+ Args:
+ images:
+ List of images to delete.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_delete_image_request(image, project, blocking)
+ for image in images], blocking)
+
+ def __build_get_image_request(self, image=None, project=None):
+ # Builds (but does not execute) the GET request for one image.
+ # image may be an Image object, a JSON dict, or a name string.
+ # Unpacks image if its type is Image or dict.
+ if isinstance(image, Image):
+ image = image.name
+ elif isinstance(image, dict):
+ image = image.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not image:
+ raise ValueError('image is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/images/' + str(image), None, None)
+
+ def get_image(self, image=None, project=None):
+ """Returns the specified image resource.
+
+ Args:
+ image:
+ Name of the image resource to return.
+ Or: Image to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Image or a JSON describing
+ the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Image
+ """
+ return self._execute(self.__build_get_image_request(
+ image, project), False)
+
+ def get_images(self, images=None, project=None):
+ """Returns the specified image resource. List operation.
+
+ Args:
+ images:
+ List of images to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Image objects.
+ """
+ return self._execute_list([
+ self.__build_get_image_request(image, project)
+ for image in images], False)
+
+ def __build_insert_image_request(self, image=None, rawDiskSource=None, description=None, name=None, rawDiskSha1Checksum=None, preferredKernel=None, project=None, blocking=True):
+ # Unpacks image if its type is Image or dict.
+ if isinstance(image, Image):
+ if rawDiskSource is None:
+ rawDiskSource = image.rawDiskSource
+ if description is None:
+ description = image.description
+ if name is None:
+ name = image.name
+ if rawDiskSha1Checksum is None:
+ rawDiskSha1Checksum = image.rawDiskSha1Checksum
+ if preferredKernel is None:
+ preferredKernel = image.preferredKernel
+ elif isinstance(image, dict):
+ __temp = image.get('rawDisk')
+ if __temp is not None:
+ if source is None:
+ source = __temp.get('source')
+ if sha1Checksum is None:
+ sha1Checksum = __temp.get('sha1Checksum')
+ if description is None:
+ description = image.get('description')
+ if name is None:
+ name = image.get('name')
+ if preferredKernel is None:
+ preferredKernel = image.get('preferredKernel')
+ elif isinstance(image, basestring):
+ if name is not None and image != name:
+ raise ValueError('Conflicting values of image and name supplied.')
+ name = image
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ request = {
+ 'kind': 'compute#image',
+ 'sourceType': 'RAW',
+ 'name': name
+ }
+ if description:
+ request['description'] = description
+ if preferredKernel:
+ request['preferredKernel'] = (self._normalize(project, 'kernels', preferredKernel) if (self is not None and project is not None) else preferredKernel)
+ if rawDiskSource is not None or rawDiskSha1Checksum is not None:
+ if not rawDiskSource:
+ raise ValueError('rawDiskSource is required parameter')
+ __temp = {
+ 'containerType': 'TAR',
+ 'source': rawDiskSource
+ }
+ if sha1Checksum:
+ __temp['sha1Checksum'] = rawDiskSha1Checksum
+ request['rawDisk'] = __temp
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/images', None, json.dumps(request))
+
+ def insert_image(self, image=None, rawDiskSource=None, description=None, name=None, rawDiskSha1Checksum=None, preferredKernel=None, project=None, blocking=True):
+ """Creates an image resource in the specified project using the data
+ included in the request.
+
+ Args:
+ image:
+ Image to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Image or a JSON describing
+ the resource.
+ rawDiskSource:
+ The full Google Cloud Storage URL where the disk image is
+ stored; provided by the client when the disk image is
+ created.
+ description:
+ Textual description of the resource; provided by the client
+ when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ rawDiskSha1Checksum:
+ An optional SHA1 checksum of the disk image before
+ unpackaging; provided by the client when the disk image is
+ created.
+ preferredKernel:
+ An optional URL of the preferred kernel for use with this
+ disk image. If not specified, a server defined default
+ kernel will be used.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_insert_image_request(
+ image, rawDiskSource, description, name, rawDiskSha1Checksum, preferredKernel, project, blocking), blocking)
+
+ def insert_images(self, images=None, rawDiskSource=None, description=None, names=None, rawDiskSha1Checksum=None, preferredKernel=None, project=None, blocking=True):
+ """Creates an image resource in the specified project using the data
+ included in the request. List operation.
+
+ Args:
+ images:
+ List of images to insert.
+ rawDiskSource:
+ The full Google Cloud Storage URL where the disk image is
+ stored; provided by the client when the disk image is
+ created.
+ description:
+ Textual description of the resource; provided by the client
+ when the resource is created.
+ names:
+ List of names of objects to insert.
+ rawDiskSha1Checksum:
+ An optional SHA1 checksum of the disk image before
+ unpackaging; provided by the client when the disk image is
+ created.
+ preferredKernel:
+ An optional URL of the preferred kernel for use with this
+ disk image. If not specified, a server defined default
+ kernel will be used.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_insert_image_request(image, rawDiskSource, description, name, rawDiskSha1Checksum, preferredKernel, project, blocking)
+ for image, name in gce_base.GoogleComputeEngineBase._combine(images, names)], blocking)
+
+ def list_images(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of image resources available to the specified
+ project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: ImageList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ # Returns a single page; use all_images() to iterate everything.
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/images', query_params, None), False)
+
+ def all_images(self, filter=None, project=None):
+ """Returns an iterator yielding all images in a project that match
+ specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all images.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ # _generate handles pagination and yields Image objects lazily.
+ return self._generate('GET', str(project) + '/images', query_params, Image)
+
+ def __build_delete_firewall_request(self, firewall=None, project=None, blocking=True):
+ # Builds (but does not execute) the DELETE request for one firewall.
+ # 'blocking' is accepted only for signature symmetry; the caller passes
+ # it to _execute / _execute_list instead.
+ # Unpacks firewall if its type is Firewall or dict.
+ if isinstance(firewall, Firewall):
+ firewall = firewall.name
+ elif isinstance(firewall, dict):
+ firewall = firewall.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not firewall:
+ raise ValueError('firewall is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/firewalls/' + str(firewall), None, None)
+
+ def delete_firewall(self, firewall=None, project=None, blocking=True):
+ """Deletes the specified firewall resource.
+
+ Args:
+ firewall:
+ Name of the firewall resource to delete.
+ Or: Firewall to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Firewall or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_delete_firewall_request(
+ firewall, project, blocking), blocking)
+
+ def delete_firewalls(self, firewalls=None, project=None, blocking=True):
+ """Deletes the specified firewall resource. List operation.
+
+ Args:
+ firewalls:
+ List of firewalls to delete.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_delete_firewall_request(firewall, project, blocking)
+ for firewall in firewalls], blocking)
+
+ def __build_get_firewall_request(self, firewall=None, project=None):
+ # Builds (but does not execute) the GET request for one firewall.
+ # firewall may be a Firewall object, a JSON dict, or a name string.
+ # Unpacks firewall if its type is Firewall or dict.
+ if isinstance(firewall, Firewall):
+ firewall = firewall.name
+ elif isinstance(firewall, dict):
+ firewall = firewall.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not firewall:
+ raise ValueError('firewall is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/firewalls/' + str(firewall), None, None)
+
+ def get_firewall(self, firewall=None, project=None):
+ """Returns the specified firewall resource.
+
+ Args:
+ firewall:
+ Name of the firewall resource to return.
+ Or: Firewall to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Firewall or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Firewall
+ """
+ return self._execute(self.__build_get_firewall_request(
+ firewall, project), False)
+
+ def get_firewalls(self, firewalls=None, project=None):
+ """Returns the specified firewall resource. List operation.
+
+ Args:
+ firewalls:
+ List of firewalls to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Firewall objects.
+ """
+ return self._execute_list([
+ self.__build_get_firewall_request(firewall, project)
+ for firewall in firewalls], False)
+
+ def __build_insert_firewall_request(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ # Builds (but does not execute) the POST request that creates a firewall.
+ # 'firewall' may be a Firewall object, a JSON dict, or a name string;
+ # explicitly passed parameters always override template values.
+ # Unpacks firewall if its type is Firewall or dict.
+ if isinstance(firewall, Firewall):
+ if network is None:
+ network = firewall.network
+ if allowed is None:
+ allowed = firewall.allowed
+ if sourceRanges is None:
+ sourceRanges = firewall.sourceRanges
+ if sourceTags is None:
+ sourceTags = firewall.sourceTags
+ if targetTags is None:
+ targetTags = firewall.targetTags
+ if description is None:
+ description = firewall.description
+ if name is None:
+ name = firewall.name
+ elif isinstance(firewall, dict):
+ if network is None:
+ network = firewall.get('network')
+ if allowed is None:
+ allowed = firewall.get('allowed')
+ if sourceRanges is None:
+ sourceRanges = firewall.get('sourceRanges')
+ if sourceTags is None:
+ sourceTags = firewall.get('sourceTags')
+ if targetTags is None:
+ targetTags = firewall.get('targetTags')
+ if description is None:
+ description = firewall.get('description')
+ if name is None:
+ name = firewall.get('name')
+ elif isinstance(firewall, basestring):
+ if name is not None and firewall != name:
+ raise ValueError('Conflicting values of firewall and name supplied.')
+ name = firewall
+
+ # Applies global defaults to missing values.
+ # Note: network falls back to self.default_network, unlike the image
+ # builders which only default the project.
+ if network is None:
+ network = self.default_network
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not network:
+ raise ValueError('network is a required parameter.')
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ # _normalize expands a bare network name into its full resource URL.
+ request = {
+ 'kind': 'compute#firewall',
+ 'network': (self._normalize(project, 'networks', network) if (self is not None and project is not None) else network),
+ 'name': name
+ }
+ if allowed:
+ request['allowed'] = _Allowed.array_to_json(allowed)
+ if sourceRanges:
+ request['sourceRanges'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceRanges)
+ if sourceTags:
+ request['sourceTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceTags)
+ if targetTags:
+ request['targetTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(targetTags)
+ if description:
+ request['description'] = description
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/firewalls', None, json.dumps(request))
+
+ def insert_firewall(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ """Creates a firewall resource in the specified project using the
+ data included in the request.
+
+ Args:
+ firewall:
+ Firewall to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Firewall or a JSON
+ describing the resource.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_insert_firewall_request(
+ firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking), blocking)
+
+ def insert_firewalls(self, firewalls=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, names=None, project=None, blocking=True):
+ """Creates a firewall resource in the specified project using the
+ data included in the request. List operation.
+
+ Args:
+ firewalls:
+ List of firewalls to insert.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ names:
+ List of names of objects to insert.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_insert_firewall_request(firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+ for firewall, name in gce_base.GoogleComputeEngineBase._combine(firewalls, names)], blocking)
+
+ def list_firewalls(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of firewall resources available to the
+ specified project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: FirewallList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ # Returns a single page; use all_firewalls() to iterate everything.
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/firewalls', query_params, None), False)
+
+ def all_firewalls(self, filter=None, project=None):
+ """Returns an iterator yielding all firewalls in a project that
+ match specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all firewalls.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ # _generate handles pagination and yields Firewall objects lazily.
+ return self._generate('GET', str(project) + '/firewalls', query_params, Firewall)
+
+ def __build_patch_firewall_request(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ # Builds (but does not execute) the PATCH request for a firewall.
+ # Unlike the insert builder, the template's name is consumed twice:
+ # once to seed the body's 'name' field and once (rebinding 'firewall'
+ # to the plain name string) to address the resource in the URL.
+ # Unpacks firewall if its type is Firewall or dict.
+ if isinstance(firewall, Firewall):
+ if network is None:
+ network = firewall.network
+ if allowed is None:
+ allowed = firewall.allowed
+ if sourceRanges is None:
+ sourceRanges = firewall.sourceRanges
+ if sourceTags is None:
+ sourceTags = firewall.sourceTags
+ if targetTags is None:
+ targetTags = firewall.targetTags
+ if description is None:
+ description = firewall.description
+ if name is None:
+ name = firewall.name
+ firewall = firewall.name
+ elif isinstance(firewall, dict):
+ if network is None:
+ network = firewall.get('network')
+ if allowed is None:
+ allowed = firewall.get('allowed')
+ if sourceRanges is None:
+ sourceRanges = firewall.get('sourceRanges')
+ if sourceTags is None:
+ sourceTags = firewall.get('sourceTags')
+ if targetTags is None:
+ targetTags = firewall.get('targetTags')
+ if description is None:
+ description = firewall.get('description')
+ if name is None:
+ name = firewall.get('name')
+ firewall = firewall.get('name')
+
+ # Applies global defaults to missing values.
+ if network is None:
+ network = self.default_network
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ # NOTE(review): a bare string 'firewall' is not unpacked here (no
+ # basestring branch, unlike the insert builder) — confirm intended.
+ if not firewall:
+ raise ValueError('firewall is a required parameter.')
+ if not network:
+ raise ValueError('network is a required parameter.')
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ # _normalize expands a bare network name into its full resource URL.
+ request = {
+ 'kind': 'compute#firewall',
+ 'network': (self._normalize(project, 'networks', network) if (self is not None and project is not None) else network),
+ 'name': name
+ }
+ if allowed:
+ request['allowed'] = _Allowed.array_to_json(allowed)
+ if sourceRanges:
+ request['sourceRanges'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceRanges)
+ if sourceTags:
+ request['sourceTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceTags)
+ if targetTags:
+ request['targetTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(targetTags)
+ if description:
+ request['description'] = description
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('PATCH', str(project) + '/firewalls/' + str(firewall), None, json.dumps(request))
+
+ def patch_firewall(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ """Updates the specified firewall resource with the data included in
+ the request. This method supports patch semantics.
+
+ Args:
+ firewall:
+ Name of the firewall resource to update.
+ Or: Firewall to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Firewall or a JSON
+ describing the resource.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ # Builds the PATCH request once; 'blocking' is consumed by _execute().
+ return self._execute(self.__build_patch_firewall_request(
+ firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking), blocking)
+
+ def patch_firewalls(self, firewalls=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, names=None, project=None, blocking=True):
+ """Updates the specified firewall resource with the data included in
+ the request. This method supports patch semantics. List operation.
+
+ Args:
+ firewalls:
+ List of firewalls to patch.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ names:
+ List of names of objects to patch.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ # _combine() pairs each entry of 'firewalls' with the matching 'names' entry.
+ return self._execute_list([
+ self.__build_patch_firewall_request(firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+ for firewall, name in gce_base.GoogleComputeEngineBase._combine(firewalls, names)], blocking)
+
+ def __build_update_firewall_request(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ # Builds the firewalls.update (HTTP PUT) API_REQUEST. 'blocking' is accepted
+ # only for signature symmetry with the public methods; it is unused here.
+ # Unpacks firewall if its type is Firewall or dict.
+ if isinstance(firewall, Firewall):
+ if network is None:
+ network = firewall.network
+ if allowed is None:
+ allowed = firewall.allowed
+ if sourceRanges is None:
+ sourceRanges = firewall.sourceRanges
+ if sourceTags is None:
+ sourceTags = firewall.sourceTags
+ if targetTags is None:
+ targetTags = firewall.targetTags
+ if description is None:
+ description = firewall.description
+ if name is None:
+ name = firewall.name
+ firewall = firewall.name
+ elif isinstance(firewall, dict):
+ if network is None:
+ network = firewall.get('network')
+ if allowed is None:
+ allowed = firewall.get('allowed')
+ if sourceRanges is None:
+ sourceRanges = firewall.get('sourceRanges')
+ if sourceTags is None:
+ sourceTags = firewall.get('sourceTags')
+ if targetTags is None:
+ targetTags = firewall.get('targetTags')
+ if description is None:
+ description = firewall.get('description')
+ if name is None:
+ name = firewall.get('name')
+ firewall = firewall.get('name')
+
+ # Applies global defaults to missing values.
+ if network is None:
+ network = self.default_network
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not firewall:
+ raise ValueError('firewall is a required parameter.')
+ if not network:
+ raise ValueError('network is a required parameter.')
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ # NOTE(review): 'self is not None' is always true for a bound method, so the
+ # condition reduces to 'project is not None' — looks like generated code.
+ request = {
+ 'kind': 'compute#firewall',
+ 'network': (self._normalize(project, 'networks', network) if (self is not None and project is not None) else network),
+ 'name': name
+ }
+ if allowed:
+ request['allowed'] = _Allowed.array_to_json(allowed)
+ if sourceRanges:
+ request['sourceRanges'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceRanges)
+ if sourceTags:
+ request['sourceTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(sourceTags)
+ if targetTags:
+ request['targetTags'] = gce_base.GoogleComputeEngineBase._strings_to_json(targetTags)
+ if description:
+ request['description'] = description
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('PUT', str(project) + '/firewalls/' + str(firewall), None, json.dumps(request))
+
+ def update_firewall(self, firewall=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, name=None, project=None, blocking=True):
+ """Updates the specified firewall resource with the data included in
+ the request.
+
+ Args:
+ firewall:
+ Name of the firewall resource to update.
+ Or: Firewall to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Firewall or a JSON
+ describing the resource.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ # Full-replacement semantics (PUT), unlike patch_firewall (PATCH).
+ return self._execute(self.__build_update_firewall_request(
+ firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking), blocking)
+
+ def update_firewalls(self, firewalls=None, network=None, allowed=None, sourceRanges=None, sourceTags=None, targetTags=None, description=None, names=None, project=None, blocking=True):
+ """Updates the specified firewall resource with the data included in
+ the request. List operation.
+
+ Args:
+ firewalls:
+ List of firewalls to update.
+ network:
+ URL of the network to which this firewall is applied;
+ provided by the client when the firewall is created.
+ allowed:
+ The list of rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a
+ permitted connection.
+ sourceRanges:
+ A list of IP address blocks expressed in CIDR format which
+ this rule applies to. One or both of sourceRanges and
+ sourceTags may be set; an inbound connection is allowed if
+ either the range or the tag of the source matches.
+ sourceTags:
+ A list of instance tags which this rule applies to. One or
+ both of sourceRanges and sourceTags may be set; an inbound
+ connection is allowed if either the range or the tag of the
+ source matches.
+ targetTags:
+ A list of instance tags indicating sets of instances
+ located on network which may make network connections as
+ specified in allowed. If no targetTags are specified, the
+ firewall rule applies to all instances on the specified
+ network.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ names:
+ List of names of objects to update.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ # One PUT request per (firewall, name) pair produced by _combine().
+ return self._execute_list([
+ self.__build_update_firewall_request(firewall, network, allowed, sourceRanges, sourceTags, targetTags, description, name, project, blocking)
+ for firewall, name in gce_base.GoogleComputeEngineBase._combine(firewalls, names)], blocking)
+
+ def __build_delete_network_request(self, network=None, project=None, blocking=True):
+ # Builds the networks.delete (HTTP DELETE) API_REQUEST; no request body.
+ # 'blocking' is unused here (consumed by the public caller's _execute()).
+ # Unpacks network if its type is Network or dict.
+ if isinstance(network, Network):
+ network = network.name
+ elif isinstance(network, dict):
+ network = network.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not network:
+ raise ValueError('network is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('DELETE', str(project) + '/networks/' + str(network), None, None)
+
+ def delete_network(self, network=None, project=None, blocking=True):
+ """Deletes the specified network resource.
+
+ Args:
+ network:
+ Name of the network resource to delete.
+ Or: Network to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Network or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_delete_network_request(
+ network, project, blocking), blocking)
+
+ def delete_networks(self, networks=None, project=None, blocking=True):
+ """Deletes the specified network resource. List operation.
+
+ Args:
+ networks:
+ List of networks to delete.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ # NOTE(review): unlike the firewall list methods, no names list / _combine
+ # here — each element of 'networks' fully identifies the resource.
+ return self._execute_list([
+ self.__build_delete_network_request(network, project, blocking)
+ for network in networks], blocking)
+
+ def __build_get_network_request(self, network=None, project=None):
+ # Builds the networks.get (HTTP GET) API_REQUEST; no query params or body.
+ # Unpacks network if its type is Network or dict.
+ if isinstance(network, Network):
+ network = network.name
+ elif isinstance(network, dict):
+ network = network.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not network:
+ raise ValueError('network is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/networks/' + str(network), None, None)
+
+ def get_network(self, network=None, project=None):
+ """Returns the specified network resource.
+
+ Args:
+ network:
+ Name of the network resource to return.
+ Or: Network to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Network or a JSON
+ describing the resource.
+ project:
+ Name of the project scoping this request.
+
+ Returns: Network
+ """
+ # GET is synchronous: _execute is called with blocking=False.
+ return self._execute(self.__build_get_network_request(
+ network, project), False)
+
+ def get_networks(self, networks=None, project=None):
+ """Returns the specified network resource. List operation.
+
+ Args:
+ networks:
+ List of networks to get.
+ project:
+ Name of the project scoping this request.
+
+ Returns: List of Network objects.
+ """
+ return self._execute_list([
+ self.__build_get_network_request(network, project)
+ for network in networks], False)
+
+ def __build_insert_network_request(self, network=None, IPv4Range=None, gatewayIPv4=None, description=None, name=None, project=None, blocking=True):
+ # Builds the networks.insert (HTTP POST) API_REQUEST with a JSON body.
+ # Unpacks network if its type is Network or dict.
+ if isinstance(network, Network):
+ if IPv4Range is None:
+ IPv4Range = network.IPv4Range
+ if gatewayIPv4 is None:
+ gatewayIPv4 = network.gatewayIPv4
+ if description is None:
+ description = network.description
+ if name is None:
+ name = network.name
+ elif isinstance(network, dict):
+ if IPv4Range is None:
+ IPv4Range = network.get('IPv4Range')
+ if gatewayIPv4 is None:
+ gatewayIPv4 = network.get('gatewayIPv4')
+ if description is None:
+ description = network.get('description')
+ if name is None:
+ name = network.get('name')
+ elif isinstance(network, basestring):
+ # 'basestring' makes this module Python-2-only.
+ if name is not None and network != name:
+ raise ValueError('Conflicting values of network and name supplied.')
+ name = network
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not IPv4Range:
+ raise ValueError('IPv4Range is a required parameter.')
+ if not name:
+ raise ValueError('name is a required parameter.')
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ request = {
+ 'kind': 'compute#network',
+ 'IPv4Range': IPv4Range,
+ 'name': name
+ }
+ if gatewayIPv4:
+ request['gatewayIPv4'] = gatewayIPv4
+ if description:
+ request['description'] = description
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/networks', None, json.dumps(request))
+
+ def insert_network(self, network=None, IPv4Range=None, gatewayIPv4=None, description=None, name=None, project=None, blocking=True):
+ """Creates a network resource in the specified project using the
+ data included in the request.
+
+ Args:
+ network:
+ Network to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Network or a JSON
+ describing the resource.
+ IPv4Range:
+ Required; The range of internal addresses that are legal on
+ this network. This range is a CIDR specification, for
+ example: 192.168.0.0/16. Provided by the client when the
+ network is created.
+ gatewayIPv4:
+ An optional address that is used for default routing to
+ other networks. This must be within the range specified by
+ IPv4Range, and is typically the first usable address in
+ that range. If not specified, the default value is the
+ first usable address in IPv4Range.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ name:
+ Name of the resource; provided by the client when the
+ resource is created. The name must be 1-63 characters long,
+ and comply with RFC1035.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: Operation
+ """
+ return self._execute(self.__build_insert_network_request(
+ network, IPv4Range, gatewayIPv4, description, name, project, blocking), blocking)
+
+ def insert_networks(self, networks=None, IPv4Range=None, gatewayIPv4=None, description=None, names=None, project=None, blocking=True):
+ """Creates a network resource in the specified project using the
+ data included in the request. List operation.
+
+ Args:
+ networks:
+ List of networks to insert.
+ IPv4Range:
+ Required; The range of internal addresses that are legal on
+ this network. This range is a CIDR specification, for
+ example: 192.168.0.0/16. Provided by the client when the
+ network is created.
+ gatewayIPv4:
+ An optional address that is used for default routing to
+ other networks. This must be within the range specified by
+ IPv4Range, and is typically the first usable address in
+ that range. If not specified, the default value is the
+ first usable address in IPv4Range.
+ description:
+ An optional textual description of the resource; provided
+ by the client when the resource is created.
+ names:
+ List of names of objects to insert.
+ project:
+ Name of the project scoping this request.
+ blocking:
+ If True, this method will block until the operation
+ completes. This is True by default.
+
+ Returns: List of Operation objects.
+ """
+ return self._execute_list([
+ self.__build_insert_network_request(network, IPv4Range, gatewayIPv4, description, name, project, blocking)
+ for network, name in gce_base.GoogleComputeEngineBase._combine(networks, names)], blocking)
+
+ def list_networks(self, filter=None, project=None, maxResults=None, pageToken=None):
+ """Retrieves the list of network resources available to the
+ specified project.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+ maxResults:
+ Optional. Maximum count of results to be returned. Maximum
+ and default value is 100.
+ pageToken:
+ Optional. Tag returned by a previous list request truncated
+ by maxResults. Used to continue a previous list request.
+
+ Returns: NetworkList
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ # NOTE: 'filter' shadows the builtin; kept to mirror the REST API name.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ if maxResults:
+ query_params['maxResults'] = maxResults
+ if pageToken:
+ query_params['pageToken'] = pageToken
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project) + '/networks', query_params, None), False)
+
+ def all_networks(self, filter=None, project=None):
+ """Returns an iterator yielding all networks in a project that match
+ specified criteria.
+
+ Args:
+ filter:
+ Optional. Filter expression for filtering listed resources.
+ project:
+ Name of the project scoping this request.
+
+ Returns: A generator of all networks.
+ """
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Processes the query parameters, if any.
+ query_params = {}
+ if filter:
+ query_params['filter'] = filter
+ # _generate handles pagination internally and yields Network objects.
+ return self._generate('GET', str(project) + '/networks', query_params, Network)
+
+ def __build_get_project_request(self, project=None):
+ # Builds the projects.get (HTTP GET) API_REQUEST; the path is just the
+ # project name.
+ # Unpacks project if its type is Project or dict.
+ if isinstance(project, Project):
+ project = project.name
+ elif isinstance(project, dict):
+ project = project.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+ return gce_base.GoogleComputeEngineBase.API_REQUEST('GET', str(project), None, None)
+
+ def get_project(self, project=None):
+ """Returns the specified project resource.
+
+ Args:
+ project:
+ Name of the project resource to retrieve.
+ Or: Project to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Project or a JSON
+ describing the resource.
+
+ Returns: Project
+ """
+ return self._execute(self.__build_get_project_request(
+ project), False)
+
+ def get_projects(self, projects=None):
+ """Returns the specified project resource. List operation.
+
+ Args:
+ projects:
+ List of projects to get.
+
+ Returns: List of Project objects.
+ """
+ return self._execute_list([
+ self.__build_get_project_request(project)
+ for project in projects], False)
+
+ def set_common_instance_metadata(self, project=None, items=None):
+ """Sets metadata common to all instances within the specified
+ project using the data included in the request.
+
+ Args:
+ project:
+ Name of the project scoping this request.
+ Or: Project to use as a template. Other directly provided
+ parameters take precedence and override any values in the
+ template. May be an instance of Project or a JSON
+ describing the resource.
+ items:
+ Array of key/value pairs. The total size of all keys and
+ values must be less than 512 KB.
+ """
+ # Unpacks project if its type is Project or dict.
+ if isinstance(project, Project):
+ project = project.name
+ elif isinstance(project, dict):
+ project = project.get('name')
+
+ # Applies global defaults to missing values.
+ if project is None:
+ project = self.default_project
+
+ # Ensures all required parameters are present.
+ if not project:
+ raise ValueError('project is a required parameter.')
+
+ # Creates a dict that will be sent in the request body.
+ request = {
+ 'kind': 'compute#metadata'
+ }
+ if items:
+ request['items'] = _Item.array_to_json(items)
+ # NOTE(review): docstring omits a Returns section — this returns the parsed
+ # response from _execute(..., False).
+ return self._execute(gce_base.GoogleComputeEngineBase.API_REQUEST('POST', str(project) + '/set-common-instance-metadata', None, json.dumps(request)), False)
+
diff --git a/libcloud/common/gcelib/shortcuts.py b/libcloud/common/gcelib/shortcuts.py
new file mode 100644
index 0000000..eb49857
--- /dev/null
+++ b/libcloud/common/gcelib/shortcuts.py
@@ -0,0 +1,65 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A set of convenience functions for using Google Compute Engine."""
+
+
+def network(network_name=None, external_ip=None, use_access_config=True):
+ """Constructs a list of network interfaces for an instance.
+
+ Args:
+ network_name: The name of the network resource.
+ external_ip: An optional IPv4 address. One will be chosen if omitted.
+ use_access_config: If false, the instance will have no external address.
+
+ Returns:
+ A list containing one network interface.
+ """
+ # Falls back to the 'default' network when no name is given.
+ network_interface = {'network': network_name or 'default'}
+ if use_access_config:
+ access_config = {'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}
+ if external_ip:
+ access_config['natIP'] = external_ip
+ network_interface['accessConfigs'] = [access_config]
+ return [network_interface]
+
+
+def rw_disks(disk_names):
+ """Returns READ_WRITE persistent-disk attachment dicts, one per name."""
+ disks = []
+ for disk_name in disk_names:
+ disks.append({'mode': 'READ_WRITE',
+ 'type': 'PERSISTENT',
+ 'source': disk_name})
+ return disks
+
+
+def ro_disks(disk_names):
+ """Returns READ_ONLY persistent-disk attachment dicts, one per name."""
+ disks = []
+ for disk_name in disk_names:
+ disks.append({'mode': 'READ_ONLY',
+ 'type': 'PERSISTENT',
+ 'source': disk_name})
+ return disks
+
+
+def service_accounts(scopes=None, email='default'):
+ """Returns a one-element service-account list with the given scopes."""
+ scopes = scopes or []
+ return [{'scopes': scopes,
+ 'email': email}]
+
+
+def metadata(dictionary):
+ """Converts a plain dict into the API's {'items': [{key, value}, ...]} form."""
+ # 'iteritems' is Python-2-only, consistent with the rest of this package.
+ items = [{'key': key, 'value': value}
+ for key, value in dictionary.iteritems()]
+ return {'items': items}
diff --git a/libcloud/common/gcelib/shortcuts_test.py b/libcloud/common/gcelib/shortcuts_test.py
new file mode 100755
index 0000000..c7c1fbf
--- /dev/null
+++ b/libcloud/common/gcelib/shortcuts_test.py
@@ -0,0 +1,129 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for the shortcut functions."""
+
+import unittest
+
+from gcelib import shortcuts
+
+
+class ShortcutsTests(unittest.TestCase):
+ """Unit tests for the shortcuts helper functions."""
+
+ def test_network(self):
+ self.assertEqual(
+ shortcuts.network(),
+ [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+ 'network': 'default'}])
+
+ self.assertEqual(
+ shortcuts.network(None),
+ [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+ 'network': 'default'}])
+
+ self.assertEqual(
+ shortcuts.network('default'),
+ [{'accessConfigs': [{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}],
+ 'network': 'default'}])
+
+ self.assertEqual(
+ shortcuts.network('default', use_access_config=False),
+ [{'network': 'default'}])
+
+ self.assertEqual(
+ shortcuts.network('default', external_ip='123.123.123.123'),
+ [{'accessConfigs': [
+ {'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT',
+ 'natIP': '123.123.123.123'}],
+ 'network': 'default'}])
+
+ def test_rw_disks(self):
+ self.assertEqual(
+ shortcuts.rw_disks([]),
+ [])
+
+ self.assertEqual(
+ shortcuts.rw_disks(['disk1']),
+ [{'source': 'disk1',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_WRITE'}])
+
+ self.assertEqual(
+ shortcuts.rw_disks(['disk1', 'disk2']),
+ [{'source': 'disk1',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_WRITE'},
+ {'source': 'disk2',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_WRITE'}])
+
+ def test_ro_disks(self):
+ self.assertEqual(
+ shortcuts.ro_disks([]),
+ [])
+
+ self.assertEqual(
+ shortcuts.ro_disks(['disk1']),
+ [{'source': 'disk1',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_ONLY'}])
+
+ self.assertEqual(
+ shortcuts.ro_disks(['disk1', 'disk2']),
+ [{'source': 'disk1',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_ONLY'},
+ {'source': 'disk2',
+ 'type': 'PERSISTENT',
+ 'mode': 'READ_ONLY'}])
+
+ def test_service_accounts(self):
+ self.assertEqual(
+ shortcuts.service_accounts(),
+ [{'scopes': [],
+ 'email': 'default'}])
+
+ self.assertEqual(
+ shortcuts.service_accounts(['a', 'b']),
+ [{'scopes': ['a', 'b'],
+ 'email': 'default'}])
+
+ self.assertEqual(
+ shortcuts.service_accounts(['a', 'b'],
+ email='42@developer.gserviceaccount.com'),
+ [{'scopes': ['a', 'b'],
+ 'email': '42@developer.gserviceaccount.com'}])
+
+ def test_metadata(self):
+ self.assertEqual(
+ shortcuts.metadata({}),
+ {'items': []})
+
+ self.assertEqual(
+ shortcuts.metadata({'key': 'val'}),
+ {'items': [{'key': 'key', 'value': 'val'}]})
+
+ # dictionaries don't have well-defined ordering
+ generated = shortcuts.metadata({'key1': 'val1', 'key2': 'val2'})['items']
+ expected = [{'key': 'key1', 'value': 'val1'},
+ {'key': 'key2', 'value': 'val2'}]
+ self.assertEqual(
+ sorted(generated, key=lambda i: i['key']),
+ sorted(expected, key=lambda i: i['key']))
+
+
+if __name__ == '__main__':
+ unittest.main()  # Run the tests when executed as a script.
diff --git a/libcloud/common/gcelib/thread_pool.py b/libcloud/common/gcelib/thread_pool.py
new file mode 100644
index 0000000..d14da5a
--- /dev/null
+++ b/libcloud/common/gcelib/thread_pool.py
@@ -0,0 +1,231 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A thread pool implementation used by the Google Compute Engine library."""
+
+import collections
+import Queue
+import threading
+import time
+
+
+class TokenBucket(object):
+ """Implements the token bucket algorithm.
+
+ TokenBucket allows clients to perform a set of actions while
+ conforming to a rate limit. This class is thread safe.
+
+ Example usage:
+
+ tk = TokenBucket(capacity=10, rate=1)
+ # ...
+ while not tk.get_token():
+ # Wait for more tokens to be added to the bucket.
+ time.sleep(0.5)
+ do_something()
+ """
+
+ def __init__(self, capacity, rate, timer=None):
+ """Creates a new TokenBucket.
+
+ Args:
+ capacity: The maximum number of tokens the bucket should hold.
+ rate: The rate at which new tokens should be added to the bucket
+ in tokens/second.
+ timer: A function that returns the current time in seconds. This
+ is used for testing.
+ """
+ self._capacity = capacity
+ self._num_tokens = capacity  # Bucket starts full.
+ self._rate = rate
+ self._lock = threading.Lock()  # Guards _num_tokens/_last_time.
+ self._time = timer or time.time
+ self._last_time = self._time()
+
+ def get_token(self):
+ """Returns True if a token is available.
+
+ If this method returns True, one token is removed from the bucket.
+
+ Returns:
+ True when a token is available.
+ """
+ self._lock.acquire()
+
+ # Updates the number of tokens in the bucket.
+ # Lazy refill: tokens accrue at _rate per second since the last call,
+ # capped at _capacity.
+ now = self._time()
+ self._num_tokens += (now - self._last_time) * self._rate
+ self._num_tokens = min(self._num_tokens, self._capacity)
+ self._last_time = now
+
+ has_token = False
+ if self._num_tokens >= 1:
+ self._num_tokens -= 1
+ has_token = True
+ self._lock.release()
+ return has_token
+
+
+class Future(object):
+ """Facilitates the passing of results from asynchronous operations."""
+
+ # Sentinel marking "no result set yet"; lets None be a valid result.
+ __NO_RES = object()
+
+ def __init__(self):
+ """Constructs a new Future."""
+ # The lock is created already-acquired; _set_result() releases it,
+ # which is what unblocks wait().
+ self._lock = threading.Lock()
+ self._lock.acquire()
+ self._result = Future.__NO_RES
+
+ def _set_result(self, result):
+ """Sets the return value for the operation.
+
+ Once the result is set, wait() will unblock.
+
+ Raises:
+ ValueError: If a result has already been registered with
+ this Future.
+
+ Args:
+ result: The result to associate with this Future.
+ """
+ if self._result is not Future.__NO_RES:
+ raise ValueError('The result can only be set once.')
+
+ self._result = result
+ self._lock.release()
+
+ def wait(self):
+ """Blocks until the result is available.
+
+ NOTE(review): the lock is never re-released here, so only ONE wait()
+ call is supported per Future — a second caller (or a second call by
+ the same caller) deadlocks. Flagged rather than fixed since this is
+ part of a patch body.
+
+ Returns:
+ The result.
+ """
+ self._lock.acquire()
+ return self._result
+
+
+class ThreadPool(object):
+ """A simple thread pool implementation that enforces rate limiting.
+
+ Example usage:
+
+ def my_function(x, y):
+ return x * y
+
+ pool = ThreadPool(num_threads=10, rate=1)
+ pool.start()
+ future = pool.submit(my_function, 3, y=2)
+ assert future.wait() == 6
+ pool.join()
+ """
+ # A unit of work.
+ _Work = collections.namedtuple('Work', ['future', 'func', 'args', 'kwargs'])
+
+ # States (simple lifecycle: INIT -> RUNNING -> TERMINATING -> TERMINATED).
+ _INIT = 0
+ _RUNNING = 1
+ _TERMINATING = 2
+ _TERMINATED = 3
+
+ def __init__(self, num_threads, rate):
+ """Constructs a new ThreadPool.
+
+ Args:
+ num_threads: The number of threads in the pool.
+ rate: The rate at which jobs will be invoked in jobs/second.
+ """
+ self._queue = Queue.Queue()
+ self._token_bucket = TokenBucket(num_threads, rate)
+ self._num_threads = num_threads
+ self._threads = None
+ self._state = self._INIT
+
+ def start(self):
+ """Starts the thread pool.
+
+ Raises:
+ ValueError: If the thread pool has already been started.
+ """
+ if self._state != ThreadPool._INIT:
+ raise ValueError('The thread pool has already been started.')
+
+ self._threads = []
+ for _ in xrange(self._num_threads):
+ thread = threading.Thread(target=self._worker)
+ # Daemon threads so a forgotten join() cannot hang interpreter exit.
+ thread.setDaemon(True)
+ self._threads.append(thread)
+ thread.start()
+ self._state = ThreadPool._RUNNING
+
+ def submit(self, func, *args, **kwargs):
+ """Submits a new job to the pool.
+
+ Args:
+ func: The function to execute.
+ *args: The positional arguments to func.
+ **kwargs: The key-word arguments to func.
+
+ Raises:
+ ValueError: If the thread pool is not running.
+
+ Returns:
+ A future that will contain the function's return value once the
+ job is executed.
+ """
+ if self._state != ThreadPool._RUNNING:
+ raise ValueError('The thread pool is not currently running.')
+
+ future = Future()
+ work = ThreadPool._Work(future, func, args, kwargs)
+ self._queue.put(work)
+ return future
+
+ def _worker(self):
+ """The main thread that each worker thread runs."""
+ while True:
+ work = self._queue.get()
+ # None is the shutdown sentinel enqueued by join().
+ if work is None:
+ return
+
+ # Rate limiting: poll the token bucket until a token is free.
+ while not self._token_bucket.get_token():
+ time.sleep(0.5)
+
+ try:
+ res = work.func(*work.args, **work.kwargs)
+ except BaseException as e:
+ # NOTE(review): the exception object itself becomes the future's
+ # result (it is not re-raised); callers must check the result type.
+ res = e
+
+ work.future._set_result(res)
+ self._queue.task_done()
+
+ def join(self):
+ """Causes the thread pool to shutdown.
+
+ Raises:
+ ValueError: The thread pool is not currently running.
+ """
+
+ if self._state != ThreadPool._RUNNING:
+ raise ValueError('The thread pool is not running.')
+
+ self._state = ThreadPool._TERMINATING
+
+ # One sentinel per worker guarantees every worker thread exits.
+ for _ in self._threads:
+ self._queue.put(None)
+
+ for thread in self._threads:
+ thread.join()
+
+ self._state = ThreadPool._TERMINATED
diff --git a/libcloud/common/gcelib/thread_pool_test.py b/libcloud/common/gcelib/thread_pool_test.py
new file mode 100755
index 0000000..4314c83
--- /dev/null
+++ b/libcloud/common/gcelib/thread_pool_test.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for the thread_pool module."""
+
+import unittest
+
+from gcelib import thread_pool
+
+
+class TokenBucketTests(unittest.TestCase):
+ """Tests the TokenBucket."""
+
+ def setUp(self):
+ self._time = 0
+
+ def time(self):
+ """A mock time function meant to replace time.time()."""
+ self._time += 1
+ return self._time
+
+ def test_get_token(self):
+ tb = thread_pool.TokenBucket(capacity=5, rate=0.3, timer=self.time)
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+
+ self._time = 0
+ tb = thread_pool.TokenBucket(capacity=2, rate=0.5, timer=self.time)
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+
+ self._time = 0
+ tb = thread_pool.TokenBucket(capacity=10, rate=0.5, timer=self.time)
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+ self.assertFalse(tb.get_token())
+ self.assertTrue(tb.get_token())
+
+
class FutureTests(unittest.TestCase):
  """Tests the Future class."""

  def test_edge_cases(self):
    """A future's result may be set exactly once (None is a valid result)."""
    future = thread_pool.Future()
    future._set_result(None)

    future = thread_pool.Future()
    future._set_result(42)
    # Idiomatic replacement for the try/self.fail()/except pattern:
    # setting a result a second time must raise ValueError.
    with self.assertRaises(ValueError):
      future._set_result(43)

  def test_wait(self):
    """wait() returns the previously-set result."""
    future = thread_pool.Future()
    future._set_result(42)
    self.assertEqual(future.wait(), 42)
+
+
class ThreadPoolTests(unittest.TestCase):
  """Tests for ThreadPool."""

  def test_basic(self):
    """Ensures that the thread pool maintains consistent state."""
    pool = thread_pool.ThreadPool(5, 10)
    pool.start()
    pool.join()

    # join() before start() is invalid; start() after join() is invalid.
    pool = thread_pool.ThreadPool(5, 10)
    self.assertRaises(ValueError, pool.join)
    pool.start()
    pool.join()
    self.assertRaises(ValueError, pool.start)

    # Repeated start()/join() calls keep failing.
    pool = thread_pool.ThreadPool(5, 10)
    pool.start()
    for _ in range(2):
      self.assertRaises(ValueError, pool.start)
    pool.join()
    for _ in range(2):
      self.assertRaises(ValueError, pool.join)

    # submit() on a never-started pool is rejected.
    pool = thread_pool.ThreadPool(5, 10)
    self.assertRaises(ValueError, pool.submit, self.fail)

  def test_submit(self):
    """Ensures that tasks can be submitted to the pool."""

    def forty_two():
      return 42

    pool = thread_pool.ThreadPool(1, 10)
    pool.start()
    self.assertEqual(pool.submit(forty_two).wait(), 42)
    pool.join()

    num_threads = 100
    pool = thread_pool.ThreadPool(num_threads, 10)
    pool.start()
    futures = [pool.submit(forty_two) for _ in range(num_threads)]
    for future in futures:
      self.assertEqual(future.wait(), 42)
    pool.join()
+
+
+# Allow running this test module directly from the command line.
+if __name__ == '__main__':
+  unittest.main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment