# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Methods for interacting with Google Cloud Datastore.
Allows interacting with the datastore via user-friendly Key, Entity and
Query objects rather than via protobufs.
"""
from gcloud.datastore import _implicit_environ
from gcloud.datastore.batch import _BATCHES
from gcloud.datastore.batch import Batch
from gcloud.datastore import helpers


def _require_dataset_id(dataset_id=None):
    """Infer a dataset ID from the environment, if not passed explicitly.

    :type dataset_id: string
    :param dataset_id: Optional.

    :rtype: string
    :returns: A dataset ID based on the current environment.
    :raises: :class:`EnvironmentError` if ``dataset_id`` is ``None``,
             and cannot be inferred from the environment.
    """
    if dataset_id is None:
        if _implicit_environ.DATASET_ID is None:
            raise EnvironmentError('Dataset ID could not be inferred.')
        dataset_id = _implicit_environ.DATASET_ID
    return dataset_id


def _require_connection(connection=None):
    """Infer a connection from the environment, if not passed explicitly.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional.

    :rtype: :class:`gcloud.datastore.connection.Connection`
    :returns: A connection based on the current environment.
    :raises: :class:`EnvironmentError` if ``connection`` is ``None``, and
             cannot be inferred from the environment.
    """
    if connection is None:
        if _implicit_environ.CONNECTION is None:
            raise EnvironmentError('Connection could not be inferred.')
        connection = _implicit_environ.CONNECTION
    return connection
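
# A minimal, hedged sketch of how the two helpers above fall back to the
# module-level defaults in ``_implicit_environ`` (the assignment below is
# purely illustrative; real code would normally configure these defaults
# through the library's own setup helpers rather than assigning directly):
#
#   _implicit_environ.DATASET_ID = 'my-dataset-id'
#   _require_dataset_id()           # -> 'my-dataset-id'
#   _require_dataset_id('other')    # -> 'other' (explicit value wins)
#   _require_connection()           # raises EnvironmentError if no default
#                                   # connection has been configured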


def _get_dataset_id_from_keys(keys):
    """Determines dataset ID from a list of keys.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys from the same dataset.

    :rtype: string
    :returns: The dataset ID of the keys.
    :raises: :class:`ValueError` if the key dataset IDs don't agree.
    """
    if any(key is None for key in keys):
        raise ValueError('None not allowed')

    dataset_id = keys[0].dataset_id
    # Rather than creating a list or set of all dataset IDs, we iterate
    # and check. We could allow the backend to check this for us if IDs
    # with no prefix worked (GoogleCloudPlatform/google-cloud-datastore#59)
    # or if we made sure that a prefix s~ or e~ was on each key.
    for key in keys[1:]:
        if key.dataset_id != dataset_id:
            raise ValueError('All keys in get must be from the same dataset.')
    return dataset_id
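
# Hedged example of the agreement check above (the ``Key`` constructor is
# assumed, for illustration, to accept path arguments plus a ``dataset_id``
# keyword):
#
#   from gcloud.datastore.key import Key
#
#   _get_dataset_id_from_keys(
#       [Key('Person', 1, dataset_id='a'), Key('Person', 2, dataset_id='a')])
#   # -> 'a'
#   _get_dataset_id_from_keys(
#       [Key('Person', 1, dataset_id='a'), Key('Person', 2, dataset_id='b')])
#   # -> ValueError, since the dataset IDs disagree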


def get(keys, missing=None, deferred=None, connection=None):
    """Retrieves entities, along with their attributes.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys to be retrieved from the datastore.

    :type missing: an empty list or None.
    :param missing: If a list is passed, the key-only entities returned
                    by the backend as "missing" will be copied into it.
                    Use only as a keyword param.

    :type deferred: an empty list or None.
    :param deferred: If a list is passed, the keys returned
                     by the backend as "deferred" will be copied into it.
                     Use only as a keyword param.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional. The connection used to connect to datastore.

    :rtype: list of :class:`gcloud.datastore.entity.Entity`
    :returns: The requested entities.
    """
    if not keys:
        return []

    connection = _require_connection(connection)
    dataset_id = _get_dataset_id_from_keys(keys)

    entity_pbs = connection.lookup(
        dataset_id=dataset_id,
        key_pbs=[k.to_protobuf() for k in keys],
        missing=missing, deferred=deferred,
    )

    if missing is not None:
        missing[:] = [
            helpers.entity_from_protobuf(missed_pb)
            for missed_pb in missing]

    if deferred is not None:
        deferred[:] = [
            helpers.key_from_protobuf(deferred_pb)
            for deferred_pb in deferred]

    entities = []
    for entity_pb in entity_pbs:
        entities.append(helpers.entity_from_protobuf(entity_pb))

    return entities
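
# Hedged usage sketch for ``get`` (assumes ``_implicit_environ.CONNECTION``
# has been configured, and that ``Key`` accepts path arguments plus a
# ``dataset_id`` keyword; the kind and ID are illustrative):
#
#   from gcloud.datastore.key import Key
#
#   missing = []
#   entities = get([Key('Person', 1234, dataset_id='my-dataset-id')],
#                  missing=missing)
#   # ``entities`` holds the found entities; ``missing`` now holds key-only
#   # entities for any keys the backend reported as not found.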


def put(entities, connection=None):
    """Save the entities in the Cloud Datastore.

    :type entities: list of :class:`gcloud.datastore.entity.Entity`
    :param entities: The entities to be saved to the datastore.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional connection used to connect to datastore.
    """
    if not entities:
        return

    connection = connection or _implicit_environ.CONNECTION

    current = _BATCHES.top
    in_batch = current is not None
    if not in_batch:
        keys = [entity.key for entity in entities]
        dataset_id = _get_dataset_id_from_keys(keys)
        current = Batch(dataset_id=dataset_id, connection=connection)
    for entity in entities:
        current.put(entity)
    if not in_batch:
        current.commit()
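
# Hedged usage sketch for ``put`` (the ``Entity`` constructor and its
# dict-style item assignment are assumed for illustration; an active batch on
# ``_BATCHES.top`` would absorb the operation and defer the commit until that
# batch commits):
#
#   from gcloud.datastore.entity import Entity
#   from gcloud.datastore.key import Key
#
#   person = Entity(key=Key('Person', 1234, dataset_id='my-dataset-id'))
#   person['name'] = 'Alice'
#   put([person])   # committed immediately when no batch is active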


def delete(keys, connection=None):
    """Delete the keys in the Cloud Datastore.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys to be deleted from the datastore.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional connection used to connect to datastore.
    """
    if not keys:
        return

    connection = connection or _implicit_environ.CONNECTION

    # We allow partial keys to attempt a delete; the backend will fail
    # the request.
    current = _BATCHES.top
    in_batch = current is not None
    if not in_batch:
        dataset_id = _get_dataset_id_from_keys(keys)
        current = Batch(dataset_id=dataset_id, connection=connection)
    for key in keys:
        current.delete(key)
    if not in_batch:
        current.commit()
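
# Hedged usage sketch for ``delete`` (key values are illustrative; as with
# ``put``, an active batch on ``_BATCHES.top`` queues the deletion instead of
# committing it right away):
#
#   from gcloud.datastore.key import Key
#
#   delete([Key('Person', 1234, dataset_id='my-dataset-id')])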


def allocate_ids(incomplete_key, num_ids, connection=None):
    """Allocates a list of IDs from a partial key.

    :type incomplete_key: A :class:`gcloud.datastore.key.Key`
    :param incomplete_key: Partial key to use as base for allocated IDs.

    :type num_ids: integer
    :param num_ids: The number of IDs to allocate.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional. The connection used to connect to datastore.

    :rtype: list of :class:`gcloud.datastore.key.Key`
    :returns: The (complete) keys allocated with ``incomplete_key`` as root.
    :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key.
    """
    connection = _require_connection(connection)
    if not incomplete_key.is_partial:
        raise ValueError(('Key is not partial.', incomplete_key))

    incomplete_key_pb = incomplete_key.to_protobuf()
    incomplete_key_pbs = [incomplete_key_pb] * num_ids

    allocated_key_pbs = connection.allocate_ids(incomplete_key.dataset_id,
                                                incomplete_key_pbs)
    allocated_ids = [allocated_key_pb.path_element[-1].id
                     for allocated_key_pb in allocated_key_pbs]
    return [incomplete_key.completed_key(allocated_id)
            for allocated_id in allocated_ids]
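
# Hedged usage sketch for ``allocate_ids`` (a key built with only a kind is
# assumed to be partial, i.e. ``is_partial`` is true; values illustrative):
#
#   from gcloud.datastore.key import Key
#
#   partial = Key('Person', dataset_id='my-dataset-id')
#   new_keys = allocate_ids(partial, 3)
#   # ``new_keys`` is a list of three complete keys whose IDs were reserved
#   # by the backend via ``connection.allocate_ids``.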