Commit 43c190b
fix: add PROTO in streaming chunks (#1213)
b/372956316

When the row size exceeds a certain limit, the rows are divided into chunks and sent to the client in multiple parts. The client is responsible for merging these chunks to reconstruct the full row. However, for PROTO and ENUM types this chunk-merging logic was not implemented, causing a `KeyError: 13` when attempting to merge PROTO chunks.

#### Sample to reproduce the test case

[Python file](https://gist.github.com/harshachinta/95a81eeda81c422814353a5995d01e20)
[proto file](https://gist.github.com/harshachinta/fd15bf558bd4f40443411ddd164638cc)

#### Steps to generate descriptors.pb and code file from proto

```
protoc --proto_path=testdata/ --include_imports --descriptor_set_out=testdata/descriptors.pb --python_out=testdata/ testdata/wrapper.proto
```
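For context, the failure mode looks roughly like the following minimal sketch (the instance, database, table, and column names are illustrative assumptions; the linked gists contain the actual reproduction):

```python
from google.cloud import spanner

client = spanner.Client()
instance = client.instance("my-instance")    # assumed name
database = instance.database("my-database")  # assumed name

with database.snapshot() as snapshot:
    # A row with a large PROTO column forces the server to split the
    # result into multiple PartialResultSet chunks.
    results = snapshot.execute_sql(
        "SELECT large_proto_col FROM my_table"  # assumed schema
    )
    for row in results:
        # Before this fix, merging the PROTO chunks here raised
        # KeyError: 13 (13 is the numeric value of TypeCode.PROTO).
        print(row[0])
```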
2 files changed: +42 −0

google/cloud/spanner_v1/streamed.py (+2)

```diff
@@ -345,6 +345,8 @@ def _merge_struct(lhs, rhs, type_):
     TypeCode.TIMESTAMP: _merge_string,
     TypeCode.NUMERIC: _merge_string,
     TypeCode.JSON: _merge_string,
+    TypeCode.PROTO: _merge_string,
+    TypeCode.ENUM: _merge_string,
 }
```
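This table maps each `TypeCode` to a merge helper, so a missing entry surfaces as a `KeyError` on the type's numeric value (`TypeCode.PROTO` is 13, `TypeCode.ENUM` is 14). A minimal sketch of the string merge these two types now reuse, written against the protobuf `Value` type rather than copied from the library source:

```python
from google.protobuf.struct_pb2 import Value

def merge_string_chunks(lhs: Value, rhs: Value) -> Value:
    # Hypothetical stand-in for the library's _merge_string helper.
    # PROTO values cross the wire base64-encoded and ENUM values as
    # stringified integers, so both merge by plain string concatenation,
    # just like STRING/BYTES/NUMERIC/JSON columns.
    return Value(string_value=lhs.string_value + rhs.string_value)

# e.g. rejoining the two halves of a split base64 payload:
merged = merge_string_chunks(
    Value(string_value="iVBORw0K"), Value(string_value="Ggo=")
)
assert merged.string_value == "iVBORw0KGgo="
```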
tests/unit/test_streamed.py (+40)

```diff
@@ -272,6 +272,46 @@ def test__merge_chunk_string_w_bytes(self):
         )
         self.assertIsNone(streamed._pending_chunk)
 
+    def test__merge_chunk_proto(self):
+        from google.cloud.spanner_v1 import TypeCode
+
+        iterator = _MockCancellableIterator()
+        streamed = self._make_one(iterator)
+        FIELDS = [self._make_scalar_field("proto", TypeCode.PROTO)]
+        streamed._metadata = self._make_result_set_metadata(FIELDS)
+        streamed._pending_chunk = self._make_value(
+            "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA"
+            "6fptVAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA\n"
+        )
+        chunk = self._make_value(
+            "B3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0FNUExF"
+            "MG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n"
+        )
+
+        merged = streamed._merge_chunk(chunk)
+
+        self.assertEqual(
+            merged.string_value,
+            "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA6fptVAAAACXBIWXMAAAsTAAAL"
+            "EwEAmpwYAAAA\nB3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0"
+            "FNUExFMG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n",
+        )
+        self.assertIsNone(streamed._pending_chunk)
+
+    def test__merge_chunk_enum(self):
+        from google.cloud.spanner_v1 import TypeCode
+
+        iterator = _MockCancellableIterator()
+        streamed = self._make_one(iterator)
+        FIELDS = [self._make_scalar_field("age", TypeCode.ENUM)]
+        streamed._metadata = self._make_result_set_metadata(FIELDS)
+        streamed._pending_chunk = self._make_value(42)
+        chunk = self._make_value(13)
+
+        merged = streamed._merge_chunk(chunk)
+        self.assertEqual(merged.string_value, "4213")
+        self.assertIsNone(streamed._pending_chunk)
+
     def test__merge_chunk_array_of_bool(self):
         from google.cloud.spanner_v1 import TypeCode
```
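A note on the ENUM assertion above: merging the chunks `42` and `13` yields `"4213"` rather than an arithmetic sum, because Spanner encodes INT64 and ENUM values as strings on the wire and chunk merging is string concatenation. A short sketch of that encoding, assuming the private `_make_value_pb` helper the test suite also wraps:

```python
from google.cloud.spanner_v1._helpers import _make_value_pb

lhs = _make_value_pb(42)  # protobuf Value with string_value="42"
rhs = _make_value_pb(13)  # protobuf Value with string_value="13"

# ENUM chunk merging concatenates the string encodings, not the numbers:
merged_text = lhs.string_value + rhs.string_value
assert merged_text == "4213"
```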