@@ -110,6 +110,24 @@ def _verify_table_definitions(self, job, config):
             self.assertIsNotNone(expected_ec)
             self.assertEqual(found_ec.to_api_repr(), expected_ec)
 
+    def _verify_dml_stats_resource_properties(self, job, resource):
+        query_stats = resource.get("statistics", {}).get("query", {})
+
+        if "dmlStats" in query_stats:
+            resource_dml_stats = query_stats["dmlStats"]
+            job_dml_stats = job.dml_stats
+            assert str(job_dml_stats.inserted_row_count) == resource_dml_stats.get(
+                "insertedRowCount", "0"
+            )
+            assert str(job_dml_stats.updated_row_count) == resource_dml_stats.get(
+                "updatedRowCount", "0"
+            )
+            assert str(job_dml_stats.deleted_row_count) == resource_dml_stats.get(
+                "deletedRowCount", "0"
+            )
+        else:
+            assert job.dml_stats is None
+
     def _verify_configuration_properties(self, job, configuration):
         if "dryRun" in configuration:
             self.assertEqual(job.dry_run, configuration["dryRun"])
@@ -118,6 +136,7 @@ def _verify_configuration_properties(self, job, configuration):
 
     def _verifyResourceProperties(self, job, resource):
         self._verifyReadonlyResourceProperties(job, resource)
+        self._verify_dml_stats_resource_properties(job, resource)
 
         configuration = resource.get("configuration", {})
         self._verify_configuration_properties(job, configuration)
@@ -130,16 +149,19 @@ def _verifyResourceProperties(self, job, resource):
         self._verify_table_definitions(job, query_config)
 
         self.assertEqual(job.query, query_config["query"])
+
         if "createDisposition" in query_config:
             self.assertEqual(job.create_disposition, query_config["createDisposition"])
         else:
             self.assertIsNone(job.create_disposition)
+
         if "defaultDataset" in query_config:
             ds_ref = job.default_dataset
             ds_ref = {"projectId": ds_ref.project, "datasetId": ds_ref.dataset_id}
             self.assertEqual(ds_ref, query_config["defaultDataset"])
         else:
             self.assertIsNone(job.default_dataset)
+
         if "destinationTable" in query_config:
             table = job.destination
             tb_ref = {
@@ -150,14 +172,17 @@ def _verifyResourceProperties(self, job, resource):
             self.assertEqual(tb_ref, query_config["destinationTable"])
         else:
             self.assertIsNone(job.destination)
+
         if "priority" in query_config:
             self.assertEqual(job.priority, query_config["priority"])
         else:
             self.assertIsNone(job.priority)
+
         if "writeDisposition" in query_config:
             self.assertEqual(job.write_disposition, query_config["writeDisposition"])
         else:
             self.assertIsNone(job.write_disposition)
+
         if "destinationEncryptionConfiguration" in query_config:
             self.assertIsNotNone(job.destination_encryption_configuration)
             self.assertEqual(
@@ -166,6 +191,7 @@ def _verifyResourceProperties(self, job, resource):
             )
         else:
             self.assertIsNone(job.destination_encryption_configuration)
+
         if "schemaUpdateOptions" in query_config:
             self.assertEqual(
                 job.schema_update_options, query_config["schemaUpdateOptions"]
@@ -190,6 +216,7 @@ def test_ctor_defaults(self):
         self.assertIsNone(job.create_disposition)
         self.assertIsNone(job.default_dataset)
         self.assertIsNone(job.destination)
+        self.assertIsNone(job.dml_stats)
         self.assertIsNone(job.flatten_results)
         self.assertIsNone(job.priority)
         self.assertIsNone(job.use_query_cache)
@@ -278,6 +305,26 @@ def test_from_api_repr_with_encryption(self):
         self.assertIs(job._client, client)
         self._verifyResourceProperties(job, RESOURCE)
 
+    def test_from_api_repr_with_dml_stats(self):
+        self._setUpConstants()
+        client = _make_client(project=self.PROJECT)
+        RESOURCE = {
+            "id": self.JOB_ID,
+            "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},
+            "configuration": {"query": {"query": self.QUERY}},
+            "statistics": {
+                "query": {
+                    "dmlStats": {"insertedRowCount": "15", "updatedRowCount": "2"},
+                },
+            },
+        }
+        klass = self._get_target_class()
+
+        job = klass.from_api_repr(RESOURCE, client=client)
+
+        self.assertIs(job._client, client)
+        self._verifyResourceProperties(job, RESOURCE)
+
     def test_from_api_repr_w_properties(self):
         from google.cloud.bigquery.job import CreateDisposition
         from google.cloud.bigquery.job import SchemaUpdateOption
@@ -815,6 +862,23 @@ def test_estimated_bytes_processed(self):
         query_stats["estimatedBytesProcessed"] = str(est_bytes)
         self.assertEqual(job.estimated_bytes_processed, est_bytes)
 
+    def test_dml_stats(self):
+        from google.cloud.bigquery.job.query import DmlStats
+
+        client = _make_client(project=self.PROJECT)
+        job = self._make_one(self.JOB_ID, self.QUERY, client)
+        assert job.dml_stats is None
+
+        statistics = job._properties["statistics"] = {}
+        assert job.dml_stats is None
+
+        query_stats = statistics["query"] = {}
+        assert job.dml_stats is None
+
+        query_stats["dmlStats"] = {"insertedRowCount": "35"}
+        assert isinstance(job.dml_stats, DmlStats)
+        assert job.dml_stats.inserted_row_count == 35
+
     def test_result(self):
         from google.cloud.bigquery.table import RowIterator
 
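For context, a minimal usage sketch of the property these tests exercise (not part of the commit itself): it assumes an already-configured bigquery.Client and a placeholder table name, and relies only on behavior verified above, namely that QueryJob.dml_stats is None until the job's statistics.query.dmlStats is populated, and otherwise exposes inserted_row_count, updated_row_count, and deleted_row_count as integers.

    from google.cloud import bigquery

    client = bigquery.Client()  # assumes default credentials and project

    # `my_dataset.my_table` is a placeholder used only for illustration.
    job = client.query(
        "UPDATE `my_dataset.my_table` SET flag = TRUE WHERE flag IS NULL"
    )
    job.result()  # wait for the DML statement to finish

    if job.dml_stats is not None:
        # Row counts affected by the DML statement, parsed from
        # statistics.query.dmlStats in the job resource.
        print(job.dml_stats.inserted_row_count)
        print(job.dml_stats.updated_row_count)
        print(job.dml_stats.deleted_row_count)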