# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

def read_table(your_project_id):
    """Read selected columns of a BigQuery public table into a DataFrame.

    Creates a BigQuery Storage read session against the public
    ``bigquery-public-data.new_york_trees.tree_species`` table, reads the
    ``species_common_name`` and ``fall_color`` columns from a single Arrow
    stream, and prints the head of the resulting DataFrame.

    Args:
        your_project_id: Project ID that is billed for the read session.

    Returns:
        pandas.DataFrame with the selected columns; empty if the session
        yields no streams (i.e. the table has no rows to read).
    """
    original_your_project_id = your_project_id
    # [START bigquerystorage_pandas_tutorial_read_session]
    your_project_id = "project-for-read-session"
    # [END bigquerystorage_pandas_tutorial_read_session]
    your_project_id = original_your_project_id

    # [START bigquerystorage_pandas_tutorial_read_session]
    from google.cloud import bigquery_storage
    from google.cloud.bigquery_storage import types
    import pandas

    bqstorageclient = bigquery_storage.BigQueryReadClient()

    project_id = "bigquery-public-data"
    dataset_id = "new_york_trees"
    table_id = "tree_species"
    table = f"projects/{project_id}/datasets/{dataset_id}/tables/{table_id}"

    # Select columns to read with read options. If no read options are
    # specified, the whole table is read.
    read_options = types.ReadSession.TableReadOptions(
        selected_fields=["species_common_name", "fall_color"]
    )

    parent = f"projects/{your_project_id}"

    requested_session = types.ReadSession(
        table=table,
        # Avro is also supported, but the Arrow data format is optimized to
        # work well with column-oriented data structures such as pandas
        # DataFrames.
        data_format=types.DataFormat.ARROW,
        read_options=read_options,
    )
    read_session = bqstorageclient.create_read_session(
        parent=parent,
        read_session=requested_session,
        max_stream_count=1,
    )

    # This example reads from only a single stream. Read from multiple streams
    # to fetch data faster. Note that the session may not contain any streams
    # if there are no rows to read — guard against that instead of raising
    # IndexError on streams[0].
    if not read_session.streams:
        dataframe = pandas.DataFrame()
        return dataframe

    stream = read_session.streams[0]
    reader = bqstorageclient.read_rows(stream.name)

    # Parse all Arrow record batches and concatenate into one dataframe.
    frames = [page.to_dataframe() for page in reader.rows().pages]
    dataframe = pandas.concat(frames)
    print(dataframe.head())
    # [END bigquerystorage_pandas_tutorial_read_session]

    return dataframe