370 | 370 |   "This section walks through a few steps required in order to use the model in your notebook."
371 | 371 |   ]
372 | 372 |   },
373 |     | - {
374 |     | - "cell_type": "markdown",
375 |     | - "metadata": {
376 |     | - "id": "rS4VO1TGiO4G"
377 |     | - },
378 |     | - "source": [
379 |     | - "## Create a BigQuery Cloud resource connection\n",
380 |     | - "\n",
381 |     | - "You need to create a [Cloud resource connection](https://cloud.google.com/bigquery/docs/create-cloud-resource-connection) to enable BigQuery DataFrames to interact with Vertex AI services."
382 |     | - ]
383 |     | - },
384 |     | - {
385 |     | - "cell_type": "code",
386 |     | - "execution_count": null,
387 |     | - "metadata": {
388 |     | - "id": "KFPjDM4LVh96"
389 |     | - },
390 |     | - "outputs": [],
391 |     | - "source": [
392 |     | - "CONN_NAME = \"bqdf-llm\"\n",
393 |     | - "\n",
394 |     | - "client = bq_connection.ConnectionServiceClient()\n",
395 |     | - "new_conn_parent = f\"projects/{PROJECT_ID}/locations/{REGION}\"\n",
396 |     | - "exists_conn_parent = f\"projects/{PROJECT_ID}/locations/{REGION}/connections/{CONN_NAME}\"\n",
397 |     | - "cloud_resource_properties = bq_connection.CloudResourceProperties({})\n",
398 |     | - "\n",
399 |     | - "try:\n",
400 |     | - "    request = client.get_connection(\n",
401 |     | - "        request=bq_connection.GetConnectionRequest(name=exists_conn_parent)\n",
402 |     | - "    )\n",
403 |     | - "    CONN_SERVICE_ACCOUNT = f\"serviceAccount:{request.cloud_resource.service_account_id}\"\n",
404 |     | - "except Exception:\n",
405 |     | - "    connection = bq_connection.types.Connection(\n",
406 |     | - "        {\"friendly_name\": CONN_NAME, \"cloud_resource\": cloud_resource_properties}\n",
407 |     | - "    )\n",
408 |     | - "    request = bq_connection.CreateConnectionRequest(\n",
409 |     | - "        {\n",
410 |     | - "            \"parent\": new_conn_parent,\n",
411 |     | - "            \"connection_id\": CONN_NAME,\n",
412 |     | - "            \"connection\": connection,\n",
413 |     | - "        }\n",
414 |     | - "    )\n",
415 |     | - "    response = client.create_connection(request)\n",
416 |     | - "    CONN_SERVICE_ACCOUNT = (\n",
417 |     | - "        f\"serviceAccount:{response.cloud_resource.service_account_id}\"\n",
418 |     | - "    )\n",
419 |     | - "print(CONN_SERVICE_ACCOUNT)"
420 |     | - ]
421 |     | - },
422 |     | - {
423 |     | - "cell_type": "markdown",
424 |     | - "metadata": {
425 |     | - "id": "W6l6Ol2biU9h"
426 |     | - },
427 |     | - "source": [
428 |     | - "## Set permissions for the service account\n",
429 |     | - "\n",
430 |     | - "The resource connection service account requires certain project-level permissions:\n",
431 |     | - " - `roles/aiplatform.user` and `roles/bigquery.connectionUser`: These roles are required for the connection to create a model definition using the LLM model in Vertex AI ([documentation](https://cloud.google.com/bigquery/docs/generate-text#give_the_service_account_access)).\n",
432 |     | - " - `roles/run.invoker`: This role is required for the connection to have read-only access to Cloud Run services that back custom/remote functions ([documentation](https://cloud.google.com/bigquery/docs/remote-functions#grant_permission_on_function)).\n",
433 |     | - "\n",
434 |     | - "Set these permissions by running the following `gcloud` commands:"
435 |     | - ]
436 |     | - },
437 |     | - {
438 |     | - "cell_type": "code",
439 |     | - "execution_count": null,
440 |     | - "metadata": {
441 |     | - "id": "d8wja24SVq6s"
442 |     | - },
443 |     | - "outputs": [],
444 |     | - "source": [
445 |     | - "!gcloud projects add-iam-policy-binding {PROJECT_ID} --condition=None --no-user-output-enabled --member={CONN_SERVICE_ACCOUNT} --role='roles/bigquery.connectionUser'\n",
446 |     | - "!gcloud projects add-iam-policy-binding {PROJECT_ID} --condition=None --no-user-output-enabled --member={CONN_SERVICE_ACCOUNT} --role='roles/aiplatform.user'\n",
447 |     | - "!gcloud projects add-iam-policy-binding {PROJECT_ID} --condition=None --no-user-output-enabled --member={CONN_SERVICE_ACCOUNT} --role='roles/run.invoker'"
448 |     | - ]
449 |     | - },
450 | 373 |   {
451 | 374 |   "cell_type": "markdown",
452 | 375 |   "metadata": {

468 | 391 | "source": [
|
469 | 392 | "from bigframes.ml.llm import PaLM2TextGenerator\n",
|
470 | 393 | "\n",
|
471 |
| - "session = bf.get_global_session()\n", |
472 |
| - "connection = f\"{PROJECT_ID}.{REGION}.{CONN_NAME}\"\n", |
473 |
| - "model = PaLM2TextGenerator(session=session, connection_name=connection)" |
| 394 | + "model = PaLM2TextGenerator()" |
474 | 395 | ]
|
475 | 396 | },
|
476 | 397 | {
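The hunk above drops the explicit session and connection wiring in favor of a bare constructor. A minimal sketch of how the model cell reads after this change, assuming `bf` is the notebook's `bigframes.pandas` alias and that the library resolves a default BigQuery connection when none is passed; the `predict` call is illustrative only and not part of this diff:

```python
import bigframes.pandas as bf  # assumed alias used by the notebook
from bigframes.ml.llm import PaLM2TextGenerator

# After this change, no explicit session or connection_name is passed;
# the session and a default BigQuery connection are left to the library.
model = PaLM2TextGenerator()

# Illustrative usage (assumed bigframes.ml predict-style API, not shown in
# this diff): prompts go in as a DataFrame column, predictions come back
# as a DataFrame.
prompts = bf.DataFrame({"prompt": ["Write a Python function that reverses a string."]})
predictions = model.predict(prompts)
```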
651 | 572 |   },
652 | 573 |   "outputs": [],
653 | 574 |   "source": [
654 |     | - "@bf.remote_function([str], str, bigquery_connection=CONN_NAME)\n",
    | 575 | + "@bf.remote_function([str], str)\n",
655 | 576 |   "def extract_code(text: str):\n",
656 | 577 |   "    try:\n",
657 | 578 |   "        res = text[text.find('\\n')+1:text.find('```', 3)]\n",
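As with the model cell, the decorator in this hunk no longer names a BigQuery connection. A short sketch of the resulting remote function, again assuming `bf` is `bigframes.pandas` and that a default connection is picked up when `bigquery_connection` is omitted; the tail of `extract_code` falls outside this hunk, so the `except`/`return` below is only a plausible completion to keep the sketch self-contained:

```python
import bigframes.pandas as bf  # assumed alias used by the notebook


# After this change the decorator takes only the input and output types;
# no bigquery_connection=... argument is passed.
@bf.remote_function([str], str)
def extract_code(text: str):
    # Drop the opening fence line and cut at the closing ``` of a fenced
    # markdown code block (logic from the surrounding context lines).
    try:
        res = text[text.find("\n") + 1 : text.find("```", 3)]
    except Exception:
        res = ""  # assumed fallback; the original tail is outside this hunk
    return res
```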